applied-ai-018 committed on
Commit
4f49693
·
verified ·
1 Parent(s): 7edb135

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. env-llmeval/lib/python3.10/site-packages/aiohttp/__init__.py +240 -0
  3. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/aiohttp/_cparser.pxd +158 -0
  12. env-llmeval/lib/python3.10/site-packages/aiohttp/_find_header.pxd +2 -0
  13. env-llmeval/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
  14. env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so +0 -0
  15. env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.pyi +6 -0
  16. env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.pyx +35 -0
  17. env-llmeval/lib/python3.10/site-packages/aiohttp/_http_parser.pyx +838 -0
  18. env-llmeval/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so +0 -0
  19. env-llmeval/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +163 -0
  20. env-llmeval/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so +0 -0
  21. env-llmeval/lib/python3.10/site-packages/aiohttp/_websocket.pyx +56 -0
  22. env-llmeval/lib/python3.10/site-packages/aiohttp/abc.py +209 -0
  23. env-llmeval/lib/python3.10/site-packages/aiohttp/base_protocol.py +95 -0
  24. env-llmeval/lib/python3.10/site-packages/aiohttp/client.py +1363 -0
  25. env-llmeval/lib/python3.10/site-packages/aiohttp/client_exceptions.py +346 -0
  26. env-llmeval/lib/python3.10/site-packages/aiohttp/client_proto.py +296 -0
  27. env-llmeval/lib/python3.10/site-packages/aiohttp/client_reqrep.py +1207 -0
  28. env-llmeval/lib/python3.10/site-packages/aiohttp/client_ws.py +315 -0
  29. env-llmeval/lib/python3.10/site-packages/aiohttp/compression_utils.py +157 -0
  30. env-llmeval/lib/python3.10/site-packages/aiohttp/connector.py +1511 -0
  31. env-llmeval/lib/python3.10/site-packages/aiohttp/cookiejar.py +419 -0
  32. env-llmeval/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
  33. env-llmeval/lib/python3.10/site-packages/aiohttp/hdrs.py +108 -0
  34. env-llmeval/lib/python3.10/site-packages/aiohttp/helpers.py +1029 -0
  35. env-llmeval/lib/python3.10/site-packages/aiohttp/http.py +72 -0
  36. env-llmeval/lib/python3.10/site-packages/aiohttp/http_exceptions.py +106 -0
  37. env-llmeval/lib/python3.10/site-packages/aiohttp/http_parser.py +1041 -0
  38. env-llmeval/lib/python3.10/site-packages/aiohttp/http_websocket.py +740 -0
  39. env-llmeval/lib/python3.10/site-packages/aiohttp/http_writer.py +198 -0
  40. env-llmeval/lib/python3.10/site-packages/aiohttp/locks.py +41 -0
  41. env-llmeval/lib/python3.10/site-packages/aiohttp/log.py +8 -0
  42. env-llmeval/lib/python3.10/site-packages/aiohttp/multipart.py +1008 -0
  43. env-llmeval/lib/python3.10/site-packages/aiohttp/payload.py +463 -0
  44. env-llmeval/lib/python3.10/site-packages/aiohttp/payload_streamer.py +75 -0
  45. env-llmeval/lib/python3.10/site-packages/aiohttp/py.typed +1 -0
  46. env-llmeval/lib/python3.10/site-packages/aiohttp/pytest_plugin.py +381 -0
  47. env-llmeval/lib/python3.10/site-packages/aiohttp/resolver.py +160 -0
  48. env-llmeval/lib/python3.10/site-packages/aiohttp/streams.py +684 -0
  49. env-llmeval/lib/python3.10/site-packages/aiohttp/tcp_helpers.py +37 -0
  50. env-llmeval/lib/python3.10/site-packages/aiohttp/test_utils.py +682 -0
.gitattributes CHANGED
@@ -214,3 +214,4 @@ llmeval-env/lib/python3.10/site-packages/scipy/special/cython_special.cpython-31
214
  llmeval-env/lib/python3.10/site-packages/scipy/linalg/_flapack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
215
  llmeval-env/lib/python3.10/site-packages/scipy/io/_fast_matrix_market/_fmm_core.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
216
  env-llmeval/lib/python3.10/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
 
 
214
  llmeval-env/lib/python3.10/site-packages/scipy/linalg/_flapack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
215
  llmeval-env/lib/python3.10/site-packages/scipy/io/_fast_matrix_market/_fmm_core.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
216
  env-llmeval/lib/python3.10/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
217
+ env-llmeval/lib/python3.10/site-packages/sklearn/_loss/_loss.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
env-llmeval/lib/python3.10/site-packages/aiohttp/__init__.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __version__ = "3.9.4"
2
+
3
+ from typing import TYPE_CHECKING, Tuple
4
+
5
+ from . import hdrs as hdrs
6
+ from .client import (
7
+ BaseConnector as BaseConnector,
8
+ ClientConnectionError as ClientConnectionError,
9
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
10
+ ClientConnectorError as ClientConnectorError,
11
+ ClientConnectorSSLError as ClientConnectorSSLError,
12
+ ClientError as ClientError,
13
+ ClientHttpProxyError as ClientHttpProxyError,
14
+ ClientOSError as ClientOSError,
15
+ ClientPayloadError as ClientPayloadError,
16
+ ClientProxyConnectionError as ClientProxyConnectionError,
17
+ ClientRequest as ClientRequest,
18
+ ClientResponse as ClientResponse,
19
+ ClientResponseError as ClientResponseError,
20
+ ClientSession as ClientSession,
21
+ ClientSSLError as ClientSSLError,
22
+ ClientTimeout as ClientTimeout,
23
+ ClientWebSocketResponse as ClientWebSocketResponse,
24
+ ContentTypeError as ContentTypeError,
25
+ Fingerprint as Fingerprint,
26
+ InvalidURL as InvalidURL,
27
+ NamedPipeConnector as NamedPipeConnector,
28
+ RequestInfo as RequestInfo,
29
+ ServerConnectionError as ServerConnectionError,
30
+ ServerDisconnectedError as ServerDisconnectedError,
31
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
32
+ ServerTimeoutError as ServerTimeoutError,
33
+ TCPConnector as TCPConnector,
34
+ TooManyRedirects as TooManyRedirects,
35
+ UnixConnector as UnixConnector,
36
+ WSServerHandshakeError as WSServerHandshakeError,
37
+ request as request,
38
+ )
39
+ from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
40
+ from .formdata import FormData as FormData
41
+ from .helpers import BasicAuth, ChainMapProxy, ETag
42
+ from .http import (
43
+ HttpVersion as HttpVersion,
44
+ HttpVersion10 as HttpVersion10,
45
+ HttpVersion11 as HttpVersion11,
46
+ WebSocketError as WebSocketError,
47
+ WSCloseCode as WSCloseCode,
48
+ WSMessage as WSMessage,
49
+ WSMsgType as WSMsgType,
50
+ )
51
+ from .multipart import (
52
+ BadContentDispositionHeader as BadContentDispositionHeader,
53
+ BadContentDispositionParam as BadContentDispositionParam,
54
+ BodyPartReader as BodyPartReader,
55
+ MultipartReader as MultipartReader,
56
+ MultipartWriter as MultipartWriter,
57
+ content_disposition_filename as content_disposition_filename,
58
+ parse_content_disposition as parse_content_disposition,
59
+ )
60
+ from .payload import (
61
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
62
+ AsyncIterablePayload as AsyncIterablePayload,
63
+ BufferedReaderPayload as BufferedReaderPayload,
64
+ BytesIOPayload as BytesIOPayload,
65
+ BytesPayload as BytesPayload,
66
+ IOBasePayload as IOBasePayload,
67
+ JsonPayload as JsonPayload,
68
+ Payload as Payload,
69
+ StringIOPayload as StringIOPayload,
70
+ StringPayload as StringPayload,
71
+ TextIOPayload as TextIOPayload,
72
+ get_payload as get_payload,
73
+ payload_type as payload_type,
74
+ )
75
+ from .payload_streamer import streamer as streamer
76
+ from .resolver import (
77
+ AsyncResolver as AsyncResolver,
78
+ DefaultResolver as DefaultResolver,
79
+ ThreadedResolver as ThreadedResolver,
80
+ )
81
+ from .streams import (
82
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
83
+ DataQueue as DataQueue,
84
+ EofStream as EofStream,
85
+ FlowControlDataQueue as FlowControlDataQueue,
86
+ StreamReader as StreamReader,
87
+ )
88
+ from .tracing import (
89
+ TraceConfig as TraceConfig,
90
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
91
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
92
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
93
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
94
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
95
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
96
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
97
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
98
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
99
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
100
+ TraceRequestEndParams as TraceRequestEndParams,
101
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
102
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
103
+ TraceRequestStartParams as TraceRequestStartParams,
104
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
105
+ )
106
+
107
+ if TYPE_CHECKING:
108
+ # At runtime these are lazy-loaded at the bottom of the file.
109
+ from .worker import (
110
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
111
+ GunicornWebWorker as GunicornWebWorker,
112
+ )
113
+
114
+ __all__: Tuple[str, ...] = (
115
+ "hdrs",
116
+ # client
117
+ "BaseConnector",
118
+ "ClientConnectionError",
119
+ "ClientConnectorCertificateError",
120
+ "ClientConnectorError",
121
+ "ClientConnectorSSLError",
122
+ "ClientError",
123
+ "ClientHttpProxyError",
124
+ "ClientOSError",
125
+ "ClientPayloadError",
126
+ "ClientProxyConnectionError",
127
+ "ClientResponse",
128
+ "ClientRequest",
129
+ "ClientResponseError",
130
+ "ClientSSLError",
131
+ "ClientSession",
132
+ "ClientTimeout",
133
+ "ClientWebSocketResponse",
134
+ "ContentTypeError",
135
+ "Fingerprint",
136
+ "InvalidURL",
137
+ "RequestInfo",
138
+ "ServerConnectionError",
139
+ "ServerDisconnectedError",
140
+ "ServerFingerprintMismatch",
141
+ "ServerTimeoutError",
142
+ "TCPConnector",
143
+ "TooManyRedirects",
144
+ "UnixConnector",
145
+ "NamedPipeConnector",
146
+ "WSServerHandshakeError",
147
+ "request",
148
+ # cookiejar
149
+ "CookieJar",
150
+ "DummyCookieJar",
151
+ # formdata
152
+ "FormData",
153
+ # helpers
154
+ "BasicAuth",
155
+ "ChainMapProxy",
156
+ "ETag",
157
+ # http
158
+ "HttpVersion",
159
+ "HttpVersion10",
160
+ "HttpVersion11",
161
+ "WSMsgType",
162
+ "WSCloseCode",
163
+ "WSMessage",
164
+ "WebSocketError",
165
+ # multipart
166
+ "BadContentDispositionHeader",
167
+ "BadContentDispositionParam",
168
+ "BodyPartReader",
169
+ "MultipartReader",
170
+ "MultipartWriter",
171
+ "content_disposition_filename",
172
+ "parse_content_disposition",
173
+ # payload
174
+ "AsyncIterablePayload",
175
+ "BufferedReaderPayload",
176
+ "BytesIOPayload",
177
+ "BytesPayload",
178
+ "IOBasePayload",
179
+ "JsonPayload",
180
+ "PAYLOAD_REGISTRY",
181
+ "Payload",
182
+ "StringIOPayload",
183
+ "StringPayload",
184
+ "TextIOPayload",
185
+ "get_payload",
186
+ "payload_type",
187
+ # payload_streamer
188
+ "streamer",
189
+ # resolver
190
+ "AsyncResolver",
191
+ "DefaultResolver",
192
+ "ThreadedResolver",
193
+ # streams
194
+ "DataQueue",
195
+ "EMPTY_PAYLOAD",
196
+ "EofStream",
197
+ "FlowControlDataQueue",
198
+ "StreamReader",
199
+ # tracing
200
+ "TraceConfig",
201
+ "TraceConnectionCreateEndParams",
202
+ "TraceConnectionCreateStartParams",
203
+ "TraceConnectionQueuedEndParams",
204
+ "TraceConnectionQueuedStartParams",
205
+ "TraceConnectionReuseconnParams",
206
+ "TraceDnsCacheHitParams",
207
+ "TraceDnsCacheMissParams",
208
+ "TraceDnsResolveHostEndParams",
209
+ "TraceDnsResolveHostStartParams",
210
+ "TraceRequestChunkSentParams",
211
+ "TraceRequestEndParams",
212
+ "TraceRequestExceptionParams",
213
+ "TraceRequestRedirectParams",
214
+ "TraceRequestStartParams",
215
+ "TraceResponseChunkReceivedParams",
216
+ # workers (imported lazily with __getattr__)
217
+ "GunicornUVLoopWebWorker",
218
+ "GunicornWebWorker",
219
+ )
220
+
221
+
222
+ def __dir__() -> Tuple[str, ...]:
223
+ return __all__ + ("__author__", "__doc__")
224
+
225
+
226
+ def __getattr__(name: str) -> object:
227
+ global GunicornUVLoopWebWorker, GunicornWebWorker
228
+
229
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
230
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
231
+ try:
232
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
233
+ except ImportError:
234
+ return None
235
+
236
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
237
+ GunicornWebWorker = gw # type: ignore[misc]
238
+ return guv if name == "GunicornUVLoopWebWorker" else gw
239
+
240
+ raise AttributeError(f"module {__name__} has no attribute {name}")
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.15 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc ADDED
Binary file (8.57 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc ADDED
Binary file (11.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc ADDED
Binary file (15.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc ADDED
Binary file (1.65 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc ADDED
Binary file (13.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc ADDED
Binary file (1.38 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/_cparser.pxd ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
2
+
3
+
4
+ cdef extern from "../vendor/llhttp/build/llhttp.h":
5
+
6
+ struct llhttp__internal_s:
7
+ int32_t _index
8
+ void* _span_pos0
9
+ void* _span_cb0
10
+ int32_t error
11
+ const char* reason
12
+ const char* error_pos
13
+ void* data
14
+ void* _current
15
+ uint64_t content_length
16
+ uint8_t type
17
+ uint8_t method
18
+ uint8_t http_major
19
+ uint8_t http_minor
20
+ uint8_t header_state
21
+ uint8_t lenient_flags
22
+ uint8_t upgrade
23
+ uint8_t finish
24
+ uint16_t flags
25
+ uint16_t status_code
26
+ void* settings
27
+
28
+ ctypedef llhttp__internal_s llhttp__internal_t
29
+ ctypedef llhttp__internal_t llhttp_t
30
+
31
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
32
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
33
+
34
+ struct llhttp_settings_s:
35
+ llhttp_cb on_message_begin
36
+ llhttp_data_cb on_url
37
+ llhttp_data_cb on_status
38
+ llhttp_data_cb on_header_field
39
+ llhttp_data_cb on_header_value
40
+ llhttp_cb on_headers_complete
41
+ llhttp_data_cb on_body
42
+ llhttp_cb on_message_complete
43
+ llhttp_cb on_chunk_header
44
+ llhttp_cb on_chunk_complete
45
+
46
+ llhttp_cb on_url_complete
47
+ llhttp_cb on_status_complete
48
+ llhttp_cb on_header_field_complete
49
+ llhttp_cb on_header_value_complete
50
+
51
+ ctypedef llhttp_settings_s llhttp_settings_t
52
+
53
+ enum llhttp_errno:
54
+ HPE_OK,
55
+ HPE_INTERNAL,
56
+ HPE_STRICT,
57
+ HPE_LF_EXPECTED,
58
+ HPE_UNEXPECTED_CONTENT_LENGTH,
59
+ HPE_CLOSED_CONNECTION,
60
+ HPE_INVALID_METHOD,
61
+ HPE_INVALID_URL,
62
+ HPE_INVALID_CONSTANT,
63
+ HPE_INVALID_VERSION,
64
+ HPE_INVALID_HEADER_TOKEN,
65
+ HPE_INVALID_CONTENT_LENGTH,
66
+ HPE_INVALID_CHUNK_SIZE,
67
+ HPE_INVALID_STATUS,
68
+ HPE_INVALID_EOF_STATE,
69
+ HPE_INVALID_TRANSFER_ENCODING,
70
+ HPE_CB_MESSAGE_BEGIN,
71
+ HPE_CB_HEADERS_COMPLETE,
72
+ HPE_CB_MESSAGE_COMPLETE,
73
+ HPE_CB_CHUNK_HEADER,
74
+ HPE_CB_CHUNK_COMPLETE,
75
+ HPE_PAUSED,
76
+ HPE_PAUSED_UPGRADE,
77
+ HPE_USER
78
+
79
+ ctypedef llhttp_errno llhttp_errno_t
80
+
81
+ enum llhttp_flags:
82
+ F_CHUNKED,
83
+ F_CONTENT_LENGTH
84
+
85
+ enum llhttp_type:
86
+ HTTP_REQUEST,
87
+ HTTP_RESPONSE,
88
+ HTTP_BOTH
89
+
90
+ enum llhttp_method:
91
+ HTTP_DELETE,
92
+ HTTP_GET,
93
+ HTTP_HEAD,
94
+ HTTP_POST,
95
+ HTTP_PUT,
96
+ HTTP_CONNECT,
97
+ HTTP_OPTIONS,
98
+ HTTP_TRACE,
99
+ HTTP_COPY,
100
+ HTTP_LOCK,
101
+ HTTP_MKCOL,
102
+ HTTP_MOVE,
103
+ HTTP_PROPFIND,
104
+ HTTP_PROPPATCH,
105
+ HTTP_SEARCH,
106
+ HTTP_UNLOCK,
107
+ HTTP_BIND,
108
+ HTTP_REBIND,
109
+ HTTP_UNBIND,
110
+ HTTP_ACL,
111
+ HTTP_REPORT,
112
+ HTTP_MKACTIVITY,
113
+ HTTP_CHECKOUT,
114
+ HTTP_MERGE,
115
+ HTTP_MSEARCH,
116
+ HTTP_NOTIFY,
117
+ HTTP_SUBSCRIBE,
118
+ HTTP_UNSUBSCRIBE,
119
+ HTTP_PATCH,
120
+ HTTP_PURGE,
121
+ HTTP_MKCALENDAR,
122
+ HTTP_LINK,
123
+ HTTP_UNLINK,
124
+ HTTP_SOURCE,
125
+ HTTP_PRI,
126
+ HTTP_DESCRIBE,
127
+ HTTP_ANNOUNCE,
128
+ HTTP_SETUP,
129
+ HTTP_PLAY,
130
+ HTTP_PAUSE,
131
+ HTTP_TEARDOWN,
132
+ HTTP_GET_PARAMETER,
133
+ HTTP_SET_PARAMETER,
134
+ HTTP_REDIRECT,
135
+ HTTP_RECORD,
136
+ HTTP_FLUSH
137
+
138
+ ctypedef llhttp_method llhttp_method_t;
139
+
140
+ void llhttp_settings_init(llhttp_settings_t* settings)
141
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
142
+ const llhttp_settings_t* settings)
143
+
144
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
145
+
146
+ int llhttp_should_keep_alive(const llhttp_t* parser)
147
+
148
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
149
+
150
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
151
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
152
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
153
+
154
+ const char* llhttp_method_name(llhttp_method_t method)
155
+
156
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
157
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
158
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
env-llmeval/lib/python3.10/site-packages/aiohttp/_find_header.pxd ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ cdef extern from "_find_header.h":
2
+ int find_header(char *, int)
env-llmeval/lib/python3.10/site-packages/aiohttp/_headers.pxi ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The file is autogenerated from aiohttp/hdrs.py
2
+ # Run ./tools/gen.py to update it after the origin changing.
3
+
4
+ from . import hdrs
5
+ cdef tuple headers = (
6
+ hdrs.ACCEPT,
7
+ hdrs.ACCEPT_CHARSET,
8
+ hdrs.ACCEPT_ENCODING,
9
+ hdrs.ACCEPT_LANGUAGE,
10
+ hdrs.ACCEPT_RANGES,
11
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
12
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
13
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
14
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
15
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
16
+ hdrs.ACCESS_CONTROL_MAX_AGE,
17
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
18
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
19
+ hdrs.AGE,
20
+ hdrs.ALLOW,
21
+ hdrs.AUTHORIZATION,
22
+ hdrs.CACHE_CONTROL,
23
+ hdrs.CONNECTION,
24
+ hdrs.CONTENT_DISPOSITION,
25
+ hdrs.CONTENT_ENCODING,
26
+ hdrs.CONTENT_LANGUAGE,
27
+ hdrs.CONTENT_LENGTH,
28
+ hdrs.CONTENT_LOCATION,
29
+ hdrs.CONTENT_MD5,
30
+ hdrs.CONTENT_RANGE,
31
+ hdrs.CONTENT_TRANSFER_ENCODING,
32
+ hdrs.CONTENT_TYPE,
33
+ hdrs.COOKIE,
34
+ hdrs.DATE,
35
+ hdrs.DESTINATION,
36
+ hdrs.DIGEST,
37
+ hdrs.ETAG,
38
+ hdrs.EXPECT,
39
+ hdrs.EXPIRES,
40
+ hdrs.FORWARDED,
41
+ hdrs.FROM,
42
+ hdrs.HOST,
43
+ hdrs.IF_MATCH,
44
+ hdrs.IF_MODIFIED_SINCE,
45
+ hdrs.IF_NONE_MATCH,
46
+ hdrs.IF_RANGE,
47
+ hdrs.IF_UNMODIFIED_SINCE,
48
+ hdrs.KEEP_ALIVE,
49
+ hdrs.LAST_EVENT_ID,
50
+ hdrs.LAST_MODIFIED,
51
+ hdrs.LINK,
52
+ hdrs.LOCATION,
53
+ hdrs.MAX_FORWARDS,
54
+ hdrs.ORIGIN,
55
+ hdrs.PRAGMA,
56
+ hdrs.PROXY_AUTHENTICATE,
57
+ hdrs.PROXY_AUTHORIZATION,
58
+ hdrs.RANGE,
59
+ hdrs.REFERER,
60
+ hdrs.RETRY_AFTER,
61
+ hdrs.SEC_WEBSOCKET_ACCEPT,
62
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
63
+ hdrs.SEC_WEBSOCKET_KEY,
64
+ hdrs.SEC_WEBSOCKET_KEY1,
65
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
66
+ hdrs.SEC_WEBSOCKET_VERSION,
67
+ hdrs.SERVER,
68
+ hdrs.SET_COOKIE,
69
+ hdrs.TE,
70
+ hdrs.TRAILER,
71
+ hdrs.TRANSFER_ENCODING,
72
+ hdrs.URI,
73
+ hdrs.UPGRADE,
74
+ hdrs.USER_AGENT,
75
+ hdrs.VARY,
76
+ hdrs.VIA,
77
+ hdrs.WWW_AUTHENTICATE,
78
+ hdrs.WANT_DIGEST,
79
+ hdrs.WARNING,
80
+ hdrs.X_FORWARDED_FOR,
81
+ hdrs.X_FORWARDED_HOST,
82
+ hdrs.X_FORWARDED_PROTO,
83
+ )
env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (509 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.pyi ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from typing import Any
2
+
3
+ class reify:
4
+ def __init__(self, wrapped: Any) -> None: ...
5
+ def __get__(self, inst: Any, owner: Any) -> Any: ...
6
+ def __set__(self, inst: Any, value: Any) -> None: ...
env-llmeval/lib/python3.10/site-packages/aiohttp/_helpers.pyx ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef class reify:
2
+ """Use as a class method decorator. It operates almost exactly like
3
+ the Python `@property` decorator, but it puts the result of the
4
+ method it decorates into the instance dict after the first call,
5
+ effectively replacing the function it decorates with an instance
6
+ variable. It is, in Python parlance, a data descriptor.
7
+
8
+ """
9
+
10
+ cdef object wrapped
11
+ cdef object name
12
+
13
+ def __init__(self, wrapped):
14
+ self.wrapped = wrapped
15
+ self.name = wrapped.__name__
16
+
17
+ @property
18
+ def __doc__(self):
19
+ return self.wrapped.__doc__
20
+
21
+ def __get__(self, inst, owner):
22
+ try:
23
+ try:
24
+ return inst._cache[self.name]
25
+ except KeyError:
26
+ val = self.wrapped(inst)
27
+ inst._cache[self.name] = val
28
+ return val
29
+ except AttributeError:
30
+ if inst is None:
31
+ return self
32
+ raise
33
+
34
+ def __set__(self, inst, value):
35
+ raise AttributeError("reified property is read-only")
env-llmeval/lib/python3.10/site-packages/aiohttp/_http_parser.pyx ADDED
@@ -0,0 +1,838 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #cython: language_level=3
2
+ #
3
+ # Based on https://github.com/MagicStack/httptools
4
+ #
5
+
6
+ from cpython cimport (
7
+ Py_buffer,
8
+ PyBUF_SIMPLE,
9
+ PyBuffer_Release,
10
+ PyBytes_AsString,
11
+ PyBytes_AsStringAndSize,
12
+ PyObject_GetBuffer,
13
+ )
14
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc
15
+ from libc.limits cimport ULLONG_MAX
16
+ from libc.string cimport memcpy
17
+
18
+ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
19
+ from yarl import URL as _URL
20
+
21
+ from aiohttp import hdrs
22
+ from aiohttp.helpers import DEBUG, set_exception
23
+
24
+ from .http_exceptions import (
25
+ BadHttpMessage,
26
+ BadStatusLine,
27
+ ContentLengthError,
28
+ InvalidHeader,
29
+ InvalidURLError,
30
+ LineTooLong,
31
+ PayloadEncodingError,
32
+ TransferEncodingError,
33
+ )
34
+ from .http_parser import DeflateBuffer as _DeflateBuffer
35
+ from .http_writer import (
36
+ HttpVersion as _HttpVersion,
37
+ HttpVersion10 as _HttpVersion10,
38
+ HttpVersion11 as _HttpVersion11,
39
+ )
40
+ from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
41
+
42
+ cimport cython
43
+
44
+ from aiohttp cimport _cparser as cparser
45
+
46
+ include "_headers.pxi"
47
+
48
+ from aiohttp cimport _find_header
49
+
50
+ DEF DEFAULT_FREELIST_SIZE = 250
51
+
52
+ cdef extern from "Python.h":
53
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
54
+ Py_ssize_t PyByteArray_Size(object) except -1
55
+ char* PyByteArray_AsString(object)
56
+
57
+ __all__ = ('HttpRequestParser', 'HttpResponseParser',
58
+ 'RawRequestMessage', 'RawResponseMessage')
59
+
60
+ cdef object URL = _URL
61
+ cdef object URL_build = URL.build
62
+ cdef object CIMultiDict = _CIMultiDict
63
+ cdef object CIMultiDictProxy = _CIMultiDictProxy
64
+ cdef object HttpVersion = _HttpVersion
65
+ cdef object HttpVersion10 = _HttpVersion10
66
+ cdef object HttpVersion11 = _HttpVersion11
67
+ cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
68
+ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
69
+ cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
70
+ cdef object StreamReader = _StreamReader
71
+ cdef object DeflateBuffer = _DeflateBuffer
72
+
73
+
74
+ cdef inline object extend(object buf, const char* at, size_t length):
75
+ cdef Py_ssize_t s
76
+ cdef char* ptr
77
+ s = PyByteArray_Size(buf)
78
+ PyByteArray_Resize(buf, s + length)
79
+ ptr = PyByteArray_AsString(buf)
80
+ memcpy(ptr + s, at, length)
81
+
82
+
83
+ DEF METHODS_COUNT = 46;
84
+
85
+ cdef list _http_method = []
86
+
87
+ for i in range(METHODS_COUNT):
88
+ _http_method.append(
89
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
90
+
91
+
92
+ cdef inline str http_method_str(int i):
93
+ if i < METHODS_COUNT:
94
+ return <str>_http_method[i]
95
+ else:
96
+ return "<unknown>"
97
+
98
+ cdef inline object find_header(bytes raw_header):
99
+ cdef Py_ssize_t size
100
+ cdef char *buf
101
+ cdef int idx
102
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
103
+ idx = _find_header.find_header(buf, size)
104
+ if idx == -1:
105
+ return raw_header.decode('utf-8', 'surrogateescape')
106
+ return headers[idx]
107
+
108
+
109
+ @cython.freelist(DEFAULT_FREELIST_SIZE)
110
+ cdef class RawRequestMessage:
111
+ cdef readonly str method
112
+ cdef readonly str path
113
+ cdef readonly object version # HttpVersion
114
+ cdef readonly object headers # CIMultiDict
115
+ cdef readonly object raw_headers # tuple
116
+ cdef readonly object should_close
117
+ cdef readonly object compression
118
+ cdef readonly object upgrade
119
+ cdef readonly object chunked
120
+ cdef readonly object url # yarl.URL
121
+
122
+ def __init__(self, method, path, version, headers, raw_headers,
123
+ should_close, compression, upgrade, chunked, url):
124
+ self.method = method
125
+ self.path = path
126
+ self.version = version
127
+ self.headers = headers
128
+ self.raw_headers = raw_headers
129
+ self.should_close = should_close
130
+ self.compression = compression
131
+ self.upgrade = upgrade
132
+ self.chunked = chunked
133
+ self.url = url
134
+
135
+ def __repr__(self):
136
+ info = []
137
+ info.append(("method", self.method))
138
+ info.append(("path", self.path))
139
+ info.append(("version", self.version))
140
+ info.append(("headers", self.headers))
141
+ info.append(("raw_headers", self.raw_headers))
142
+ info.append(("should_close", self.should_close))
143
+ info.append(("compression", self.compression))
144
+ info.append(("upgrade", self.upgrade))
145
+ info.append(("chunked", self.chunked))
146
+ info.append(("url", self.url))
147
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
148
+ return '<RawRequestMessage(' + sinfo + ')>'
149
+
150
+ def _replace(self, **dct):
151
+ cdef RawRequestMessage ret
152
+ ret = _new_request_message(self.method,
153
+ self.path,
154
+ self.version,
155
+ self.headers,
156
+ self.raw_headers,
157
+ self.should_close,
158
+ self.compression,
159
+ self.upgrade,
160
+ self.chunked,
161
+ self.url)
162
+ if "method" in dct:
163
+ ret.method = dct["method"]
164
+ if "path" in dct:
165
+ ret.path = dct["path"]
166
+ if "version" in dct:
167
+ ret.version = dct["version"]
168
+ if "headers" in dct:
169
+ ret.headers = dct["headers"]
170
+ if "raw_headers" in dct:
171
+ ret.raw_headers = dct["raw_headers"]
172
+ if "should_close" in dct:
173
+ ret.should_close = dct["should_close"]
174
+ if "compression" in dct:
175
+ ret.compression = dct["compression"]
176
+ if "upgrade" in dct:
177
+ ret.upgrade = dct["upgrade"]
178
+ if "chunked" in dct:
179
+ ret.chunked = dct["chunked"]
180
+ if "url" in dct:
181
+ ret.url = dct["url"]
182
+ return ret
183
+
184
+ cdef _new_request_message(str method,
185
+ str path,
186
+ object version,
187
+ object headers,
188
+ object raw_headers,
189
+ bint should_close,
190
+ object compression,
191
+ bint upgrade,
192
+ bint chunked,
193
+ object url):
194
+ cdef RawRequestMessage ret
195
+ ret = RawRequestMessage.__new__(RawRequestMessage)
196
+ ret.method = method
197
+ ret.path = path
198
+ ret.version = version
199
+ ret.headers = headers
200
+ ret.raw_headers = raw_headers
201
+ ret.should_close = should_close
202
+ ret.compression = compression
203
+ ret.upgrade = upgrade
204
+ ret.chunked = chunked
205
+ ret.url = url
206
+ return ret
207
+
208
+
209
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    """Parsed HTTP response start line and headers (no body)."""

    cdef readonly object version  # HttpVersion
    cdef readonly int code        # status code, e.g. 200
    cdef readonly str reason      # reason phrase, e.g. "OK"
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple of (bytes, bytes) pairs
    cdef readonly object should_close
    cdef readonly object compression  # 'gzip'/'deflate'/'br' or None
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        # Build "name=value" pairs in declaration order for debugging.
        info = []
        info.append(("version", self.version))
        info.append(("code", self.code))
        info.append(("reason", self.reason))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawResponseMessage(' + sinfo + ')>'
246
+
247
+
248
cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    """Fast constructor for RawResponseMessage (bypasses ``__init__``)."""
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret
269
+
270
+
271
@cython.internal
cdef class HttpParser:
    """Base wrapper around the llhttp C parser.

    Owns the llhttp parser/settings structs and accumulates parsed
    messages and payload streams; subclasses select request vs response
    mode via ``_init``.
    """

    cdef:
        cparser.llhttp_t* _cparser            # llhttp parser state (malloc'd)
        cparser.llhttp_settings_t* _csettings # llhttp callback table (malloc'd)

        bytearray _raw_name   # current header name being accumulated
        bytearray _raw_value  # current header value being accumulated
        bint _has_value       # True once a value byte arrived for _raw_name

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf        # scratch buffer for URL / status text
        str _path
        str _reason
        object _headers       # CIMultiDict under construction
        list _raw_headers     # list of (bytes, bytes) pairs
        bint _upgraded
        list _messages        # completed (message, payload) pairs
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error    # exception captured inside a C callback
        bint _auto_decompress
        int _limit

        str _content_encoding  # last seen Content-Encoding header value

        Py_buffer py_buf

    def __cinit__(self):
        # Allocate the C structs up front; _init wires them together.
        self._cparser = <cparser.llhttp_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        """Initialize parser state and register the llhttp C callbacks."""
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        # Store a back-pointer so the C callbacks can reach this object.
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        """Commit the accumulated header name/value pair, if any."""
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            # find_header returns an interned istr for well-known headers.
            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            if name is CONTENT_ENCODING:
                self._content_encoding = value

            # Reset accumulators in place (keeps allocated capacity).
            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        """Append bytes to the current header name; flush a completed pair."""
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            # A new field started, so the previous name/value is complete.
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        """Append bytes to the current header value."""
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        """Build the message object and choose a payload stream."""
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        # A real body is expected for: known non-zero content length,
        # chunked transfer, CONNECT tunnels, or read-until-EOF responses.
        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            # Wrap so consumers receive decompressed bytes transparently.
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            # Caller asked for headers only (e.g. HEAD): hand out an
            # empty payload but keep feeding the real one internally.
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        # Overridden by request/response subclasses.
        pass

    cdef inline http_version(self):
        """Return the HttpVersion, reusing the shared 1.0/1.1 singletons."""
        cdef cparser.llhttp_t* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        """Signal end of input; raise if a payload is left incomplete."""
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        """Feed raw bytes to llhttp.

        Returns ``(messages, upgraded, tail)`` where *tail* is the
        unparsed remainder after a protocol upgrade.
        """
        cdef:
            size_t data_len
            size_t nb
        cdef cparser.llhttp_errno_t errno

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        errno = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if errno is cparser.HPE_PAUSED_UPGRADE:
            cparser.llhttp_resume_after_upgrade(self._cparser)

            # Bytes consumed before the pause point.
            nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    # Prefer the Python exception captured in a callback.
                    ex = self._last_error
                    self._last_error = None
                else:
                    # Build a context snippet around the error position.
                    # NOTE(review): py_buf.buf is read here after
                    # PyBuffer_Release above — confirm the pointer is
                    # still valid for this arithmetic upstream.
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val
572
+
573
+
574
cdef class HttpRequestParser(HttpParser):
    """HTTP request parser: adds request-target (URL) parsing."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        """Parse the accumulated request target into a yarl URL.

        Handles the RFC 7230 request-target forms: authority-form
        (CONNECT), origin-form (path starting with '/'), and
        absolute-form (proxy requests).
        """
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                # Split path / query / fragment manually to avoid re-encoding.
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]

                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                self._url = URL(self._path, encoded=True)
        finally:
            # Always reset the scratch buffer for the next message.
            PyByteArray_Resize(self._buf, 0)
636
+
637
+
638
cdef class HttpResponseParser(HttpParser):
    """HTTP response parser: adds reason-phrase handling and leniency."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True
    ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)
        # Use strict parsing on dev mode, so users are warned about broken servers.
        if not DEBUG:
            cparser.llhttp_set_lenient_headers(self._cparser, 1)
            cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
            cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)

    cdef object _on_status_complete(self):
        """Decode the accumulated reason phrase (may be empty)."""
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''
663
+
664
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    """llhttp callback: reset per-message state on the owning parser."""
    cdef HttpParser owner = <HttpParser>parser.data

    owner._started = True
    owner._path = None
    owner._reason = None
    owner._headers = CIMultiDict()
    owner._raw_headers = []
    PyByteArray_Resize(owner._buf, 0)
    return 0
674
+
675
+
676
cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    """llhttp callback: accumulate request-target bytes into _buf."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        # Stash the Python exception; -1 aborts llhttp, and feed_data
        # re-raises the stored exception afterwards.
        pyparser._last_error = ex
        return -1
    else:
        return 0
689
+
690
+
691
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp callback: accumulate status reason-phrase bytes into _buf."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef str reason
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        # Stored for feed_data to re-raise after llhttp aborts.
        pyparser._last_error = ex
        return -1
    else:
        return 0
705
+
706
+
707
cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: append header-name bytes, enforcing the size cap."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        # First header field also finalizes the status/request line.
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
723
+
724
+
725
cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: append header-value bytes, enforcing the size cap."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
740
+
741
+
742
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: finalize headers and build the message object."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        # Returning 2 tells llhttp there is no body to parse
        # (upgrade / CONNECT): remaining bytes belong to the new protocol.
        if (
            pyparser._cparser.upgrade or
            pyparser._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0
758
+
759
+
760
cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    """llhttp callback: forward body bytes to the payload stream."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]  # copies the slice into a Python bytes
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as underlying_exc:
        # Optionally wrap in the caller-provided exception type, but keep
        # the original as the cause for diagnostics.
        reraised_exc = underlying_exc
        if pyparser._payload_exception is not None:
            reraised_exc = pyparser._payload_exception(str(underlying_exc))

        set_exception(pyparser._payload, reraised_exc, underlying_exc)

        # Mark so feed_data does not raise a second (parser-level) error.
        pyparser._payload_error = 1
        return -1
    else:
        return 0
777
+
778
+
779
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: mark the message finished and EOF its payload."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._started = False
        owner._on_message_complete()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
789
+
790
+
791
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    """llhttp callback: a new chunk starts; notify the payload stream."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_header()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
800
+
801
+
802
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: the current chunk ended; notify the payload stream."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_complete()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
811
+
812
+
813
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    """Map an llhttp errno to the matching aiohttp exception.

    *data* is the offending input snippet and *pointer* a caret line
    marking the error position; both are embedded in the message.
    """
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    err_msg = "{}:\n\n  {!r}\n  {}".format(desc.decode("latin-1"), data, pointer)

    # Callback failures and malformed framing -> generic bad message.
    if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(err_msg)
    # Broken start line.
    elif errno in {cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=err_msg)
    elif errno == cparser.HPE_INVALID_URL:
        return InvalidURLError(err_msg)

    # Anything unrecognized falls back to the generic error.
    return BadHttpMessage(err_msg)
env-llmeval/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (459 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/_http_writer.pyx ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython.bytes cimport PyBytes_FromStringAndSize
2
+ from cpython.exc cimport PyErr_NoMemory
3
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
4
+ from cpython.object cimport PyObject_Str
5
+ from libc.stdint cimport uint8_t, uint64_t
6
+ from libc.string cimport memcpy
7
+
8
+ from multidict import istr
9
+
10
DEF BUF_SIZE = 16 * 1024  # 16KiB
# Module-level static scratch buffer; Writer starts here and switches
# to heap memory only if output exceeds BUF_SIZE.
cdef char BUFFER[BUF_SIZE]

# Cache the multidict istr type for fast identity checks in to_str().
cdef object _istr = istr
14
+
15
+
16
+ # ----------------- writer ---------------------------
17
+
18
# Growable output buffer: starts on the static BUFFER, reallocates on heap.
cdef struct Writer:
    char *buf         # current storage (BUFFER or PyMem-allocated)
    Py_ssize_t size   # allocated capacity
    Py_ssize_t pos    # bytes written so far
22
+
23
+
24
cdef inline void _init_writer(Writer* writer):
    # Point at the shared static buffer; no allocation until it overflows.
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0
28
+
29
+
30
cdef inline void _release_writer(Writer* writer):
    # Free only heap memory; the static BUFFER must never be freed.
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)
33
+
34
+
35
cdef inline int _write_byte(Writer* writer, uint8_t ch):
    """Append one byte, growing the buffer by BUF_SIZE as needed.

    Returns 0 on success, -1 with a MemoryError pending on failure.
    """
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            # First overflow: move from the static buffer to the heap.
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0
58
+
59
+
60
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    """UTF-8 encode one code point into the writer (1-4 bytes).

    Surrogates and code points above U+10FFFF are silently skipped.
    Returns 0 on success, -1 on allocation failure.
    """
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        # ASCII: single byte.
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        # Two-byte sequence.
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surogate pair, ignored
        return 0
    elif utf < 0x10000:
        # Three-byte sequence.
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        # Four-byte sequence.
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
91
+
92
+
93
cdef inline int _write_str(Writer* writer, str s):
    """UTF-8 encode a whole string; -1 on failure.

    No explicit success return: Cython's default 0 for a cdef int
    function is relied upon when the loop completes.
    """
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
98
+
99
+
100
+ # --------------- _serialize_headers ----------------------
101
+
102
cdef str to_str(object s):
    """Coerce header keys/values to str, rejecting non-string types."""
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        # multidict istr subclasses str; normalize to a plain str.
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        # Other str subclasses.
        return str(s)
112
+
113
+
114
cdef void _safe_header(str string) except *:
    """Reject CR/LF in header text to prevent response-splitting."""
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )
120
+
121
+
122
def _serialize_headers(str status_line, headers):
    """Serialize a status line plus headers into HTTP/1.x wire bytes.

    Output: ``status_line\\r\\n(key: value\\r\\n)*\\r\\n`` encoded as UTF-8.
    Raises ValueError if any header contains CR/LF.
    """
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    # Validate everything up front so we never emit a partial header block.
    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        # A failed write leaves a MemoryError pending (PyErr_NoMemory);
        # the bare `raise` re-raises it.
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the header block.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
env-llmeval/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (234 kB). View file
 
env-llmeval/lib/python3.10/site-packages/aiohttp/_websocket.pyx ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython cimport PyBytes_AsString
2
+
3
+
4
+ #from cpython cimport PyByteArray_AsString # cython still not exports that
5
+ cdef extern from "Python.h":
6
+ char* PyByteArray_AsString(bytearray ba) except NULL
7
+
8
+ from libc.stdint cimport uint32_t, uint64_t, uintmax_t
9
+
10
+
11
def _websocket_mask_cython(object mask, object data):
    """XOR-mask websocket payload *data* in place with the 4-byte *mask*.

    Note, this function mutates its `data` argument.  Processes 8 bytes
    at a time on 64-bit platforms, then 4, then the remainder byte-wise.
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        # Non-bytearray input is copied; the copy (not the caller's
        # object) is what gets masked.
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # does it need in python ?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        # Widen the mask to 64 bits and XOR in 8-byte strides.
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    # Remaining 0-3 bytes, masked one at a time.
    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
env-llmeval/lib/python3.10/site-packages/aiohttp/abc.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import logging
3
+ from abc import ABC, abstractmethod
4
+ from collections.abc import Sized
5
+ from http.cookies import BaseCookie, Morsel
6
+ from typing import (
7
+ TYPE_CHECKING,
8
+ Any,
9
+ Awaitable,
10
+ Callable,
11
+ Dict,
12
+ Generator,
13
+ Iterable,
14
+ List,
15
+ Optional,
16
+ Tuple,
17
+ )
18
+
19
+ from multidict import CIMultiDict
20
+ from yarl import URL
21
+
22
+ from .helpers import get_running_loop
23
+ from .typedefs import LooseCookies
24
+
25
+ if TYPE_CHECKING:
26
+ from .web_app import Application
27
+ from .web_exceptions import HTTPException
28
+ from .web_request import BaseRequest, Request
29
+ from .web_response import StreamResponse
30
+ else:
31
+ BaseRequest = Request = Application = StreamResponse = None
32
+ HTTPException = None
33
+
34
+
35
class AbstractRouter(ABC):
    """Abstract URL router: maps an incoming request to a match info."""

    def __init__(self) -> None:
        # Once frozen, the route table must not change.
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""
58
+
59
+
60
class AbstractMatchInfo(ABC):
    """Result of route resolution: handler plus routing metadata."""

    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(
        self,
    ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """
104
+
105
+
106
class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""
120
+
121
+
122
class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""
132
+
133
+
134
# The subscripted form is only valid for type checkers; at runtime the
# plain Iterable is used as the base class.
if TYPE_CHECKING:
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


# Predicate used by AbstractCookieJar.clear to select cookies to drop.
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
141
+
142
+
143
class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""
164
+
165
+
166
class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0   # bytes currently buffered
    output_size = 0   # total bytes written
    length: Optional[int] = 0  # expected content length, None if unknown

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""
198
+
199
+
200
class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
env-llmeval/lib/python3.10/site-packages/aiohttp/base_protocol.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from typing import Optional, cast
3
+
4
+ from .helpers import set_exception
5
+ from .tcp_helpers import tcp_nodelay
6
+
7
+
8
class BaseProtocol(asyncio.Protocol):
    """Common asyncio protocol base handling write/read flow control."""

    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        # True while the transport asked us to stop writing.
        self._paused = False
        # Future awaited by _drain_helper until writing may resume.
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False
        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False
        fut = self._drain_waiter
        if fut is None:
            return
        self._drain_waiter = None
        if not fut.done():
            fut.set_result(None)

    def pause_reading(self) -> None:
        # Best effort: some transports don't implement pause_reading.
        if self._reading_paused or self.transport is None:
            return
        try:
            self.transport.pause_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            pass
        self._reading_paused = True

    def resume_reading(self) -> None:
        if not self._reading_paused or self.transport is None:
            return
        try:
            self.transport.resume_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            pass
        self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tcp_transport = cast(asyncio.Transport, transport)
        # Disable Nagle's algorithm for latency-sensitive HTTP traffic.
        tcp_nodelay(tcp_transport, True)
        self.transport = tcp_transport

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        fut = self._drain_waiter
        if fut is None:
            return
        self._drain_waiter = None
        if fut.done():
            return
        if exc is None:
            fut.set_result(None)
        else:
            set_exception(
                fut,
                ConnectionError("Connection lost"),
                exc,
            )

    async def _drain_helper(self) -> None:
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        fut = self._drain_waiter
        if fut is None:
            fut = self._loop.create_future()
            self._drain_waiter = fut
        # Shield so cancelling the drain doesn't cancel the shared waiter.
        await asyncio.shield(fut)
env-llmeval/lib/python3.10/site-packages/aiohttp/client.py ADDED
@@ -0,0 +1,1363 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Client for asyncio."""
2
+
3
+ import asyncio
4
+ import base64
5
+ import hashlib
6
+ import json
7
+ import os
8
+ import sys
9
+ import traceback
10
+ import warnings
11
+ from contextlib import suppress
12
+ from types import SimpleNamespace, TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Awaitable,
17
+ Callable,
18
+ Coroutine,
19
+ Final,
20
+ FrozenSet,
21
+ Generator,
22
+ Generic,
23
+ Iterable,
24
+ List,
25
+ Mapping,
26
+ Optional,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ TypeVar,
31
+ Union,
32
+ )
33
+
34
+ import attr
35
+ from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
36
+ from yarl import URL
37
+
38
+ from . import hdrs, http, payload
39
+ from .abc import AbstractCookieJar
40
+ from .client_exceptions import (
41
+ ClientConnectionError as ClientConnectionError,
42
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
43
+ ClientConnectorError as ClientConnectorError,
44
+ ClientConnectorSSLError as ClientConnectorSSLError,
45
+ ClientError as ClientError,
46
+ ClientHttpProxyError as ClientHttpProxyError,
47
+ ClientOSError as ClientOSError,
48
+ ClientPayloadError as ClientPayloadError,
49
+ ClientProxyConnectionError as ClientProxyConnectionError,
50
+ ClientResponseError as ClientResponseError,
51
+ ClientSSLError as ClientSSLError,
52
+ ContentTypeError as ContentTypeError,
53
+ InvalidURL as InvalidURL,
54
+ ServerConnectionError as ServerConnectionError,
55
+ ServerDisconnectedError as ServerDisconnectedError,
56
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
57
+ ServerTimeoutError as ServerTimeoutError,
58
+ TooManyRedirects as TooManyRedirects,
59
+ WSServerHandshakeError as WSServerHandshakeError,
60
+ )
61
+ from .client_reqrep import (
62
+ ClientRequest as ClientRequest,
63
+ ClientResponse as ClientResponse,
64
+ Fingerprint as Fingerprint,
65
+ RequestInfo as RequestInfo,
66
+ _merge_ssl_params,
67
+ )
68
+ from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
69
+ from .connector import (
70
+ BaseConnector as BaseConnector,
71
+ NamedPipeConnector as NamedPipeConnector,
72
+ TCPConnector as TCPConnector,
73
+ UnixConnector as UnixConnector,
74
+ )
75
+ from .cookiejar import CookieJar
76
+ from .helpers import (
77
+ _SENTINEL,
78
+ DEBUG,
79
+ BasicAuth,
80
+ TimeoutHandle,
81
+ ceil_timeout,
82
+ get_env_proxy_for_url,
83
+ get_running_loop,
84
+ method_must_be_empty_body,
85
+ sentinel,
86
+ strip_auth_from_url,
87
+ )
88
+ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
89
+ from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
90
+ from .streams import FlowControlDataQueue
91
+ from .tracing import Trace, TraceConfig
92
+ from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
93
+
94
+ __all__ = (
95
+ # client_exceptions
96
+ "ClientConnectionError",
97
+ "ClientConnectorCertificateError",
98
+ "ClientConnectorError",
99
+ "ClientConnectorSSLError",
100
+ "ClientError",
101
+ "ClientHttpProxyError",
102
+ "ClientOSError",
103
+ "ClientPayloadError",
104
+ "ClientProxyConnectionError",
105
+ "ClientResponseError",
106
+ "ClientSSLError",
107
+ "ContentTypeError",
108
+ "InvalidURL",
109
+ "ServerConnectionError",
110
+ "ServerDisconnectedError",
111
+ "ServerFingerprintMismatch",
112
+ "ServerTimeoutError",
113
+ "TooManyRedirects",
114
+ "WSServerHandshakeError",
115
+ # client_reqrep
116
+ "ClientRequest",
117
+ "ClientResponse",
118
+ "Fingerprint",
119
+ "RequestInfo",
120
+ # connector
121
+ "BaseConnector",
122
+ "TCPConnector",
123
+ "UnixConnector",
124
+ "NamedPipeConnector",
125
+ # client_ws
126
+ "ClientWebSocketResponse",
127
+ # client
128
+ "ClientSession",
129
+ "ClientTimeout",
130
+ "request",
131
+ )
132
+
133
+
134
if TYPE_CHECKING:
    from ssl import SSLContext
else:
    # Runtime placeholder so annotations naming SSLContext resolve even when
    # the ssl module is unavailable; real contexts are only used when given.
    SSLContext = None
138
+
139
+
140
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ClientTimeout:
    """Immutable bundle of per-request timeouts, in seconds (None = no limit)."""

    total: Optional[float] = None
    connect: Optional[float] = None
    sock_read: Optional[float] = None
    sock_connect: Optional[float] = None
    ceil_threshold: float = 5

    # Candidate finer-grained timeouts, reserved for a future API:
    # pool_queue_timeout: Optional[float] = None
    # dns_resolution_timeout: Optional[float] = None
    # socket_connect_timeout: Optional[float] = None
    # connection_acquiring_timeout: Optional[float] = None
    # new_connection_timeout: Optional[float] = None
    # http_header_timeout: Optional[float] = None
    # response_body_timeout: Optional[float] = None

    # To customise a single request either build a fresh ClientTimeout or
    # derive one from the default via attr.evolve
    # (http://www.attrs.org/en/stable/api.html#attr.evolve).
161
+
162
# 5 Minute default read timeout
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)

_RetType = TypeVar("_RetType")
# Callable that picks a fallback charset from a response and its raw body.
_CharsetResolver = Callable[[ClientResponse, bytes], str]
167
+
168
+
169
+ class ClientSession:
170
+ """First-class interface for making HTTP requests."""
171
+
172
+ ATTRS = frozenset(
173
+ [
174
+ "_base_url",
175
+ "_source_traceback",
176
+ "_connector",
177
+ "requote_redirect_url",
178
+ "_loop",
179
+ "_cookie_jar",
180
+ "_connector_owner",
181
+ "_default_auth",
182
+ "_version",
183
+ "_json_serialize",
184
+ "_requote_redirect_url",
185
+ "_timeout",
186
+ "_raise_for_status",
187
+ "_auto_decompress",
188
+ "_trust_env",
189
+ "_default_headers",
190
+ "_skip_auto_headers",
191
+ "_request_class",
192
+ "_response_class",
193
+ "_ws_response_class",
194
+ "_trace_configs",
195
+ "_read_bufsize",
196
+ "_max_line_size",
197
+ "_max_field_size",
198
+ "_resolve_charset",
199
+ ]
200
+ )
201
+
202
+ _source_traceback: Optional[traceback.StackSummary] = None
203
+ _connector: Optional[BaseConnector] = None
204
+
205
    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: Union[
            bool, Callable[[ClientResponse], Awaitable[None]]
        ] = False,
        read_timeout: Union[float, _SENTINEL] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2**16,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
        fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
    ) -> None:
        """Set up the session: event loop, connector, cookie jar, defaults.

        The legacy ``read_timeout``/``conn_timeout`` arguments are folded into
        ``timeout`` (with deprecation warnings) and must not be combined with
        an explicit ``timeout``.
        """
        # Prefer the connector's loop when no loop was given explicitly.
        if loop is None:
            if connector is not None:
                loop = connector._loop

        loop = get_running_loop(loop)

        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
        else:
            self._base_url = URL(base_url)
            assert (
                self._base_url.origin() == self._base_url
            ), "Only absolute URLs without path part are supported"

        if connector is None:
            connector = TCPConnector(loop=loop)

        # Session and connector must share one loop for their futures to mix.
        if connector._loop is not loop:
            raise RuntimeError("Session and connector has to use same event loop")

        self._loop = loop

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        if timeout is sentinel or timeout is None:
            # No explicit timeout: start from the default and fold in the
            # deprecated per-phase arguments if supplied.
            self._timeout = DEFAULT_TIMEOUT
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            if not isinstance(timeout, ClientTimeout):
                raise ValueError(
                    f"timeout parameter cannot be of {type(timeout)} type, "
                    "please use 'timeout=ClientTimeout(...)'",
                )
            self._timeout = timeout
            # Explicit timeout conflicts with the deprecated arguments.
            if read_timeout is not sentinel:
                raise ValueError(
                    "read_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.read"
                )
            if conn_timeout is not None:
                raise ValueError(
                    "conn_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.connect"
                )
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize
        self._max_line_size = max_line_size
        self._max_field_size = max_field_size

        # Convert to list of tuples
        if headers:
            real_headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            real_headers = CIMultiDict()
        self._default_headers: CIMultiDict[str] = real_headers
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        # Freeze trace configs so they can't change mid-session.
        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()

        self._resolve_charset = fallback_charset_resolver
338
+ def __init_subclass__(cls: Type["ClientSession"]) -> None:
339
+ warnings.warn(
340
+ "Inheritance class {} from ClientSession "
341
+ "is discouraged".format(cls.__name__),
342
+ DeprecationWarning,
343
+ stacklevel=2,
344
+ )
345
+
346
    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            # Debug-only guard: warn on assignment to attributes outside the
            # sanctioned ATTRS set, since monkey-patching sessions is fragile.
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)
357
+
358
+ def __del__(self, _warnings: Any = warnings) -> None:
359
+ if not self.closed:
360
+ kwargs = {"source": self}
361
+ _warnings.warn(
362
+ f"Unclosed client session {self!r}", ResourceWarning, **kwargs
363
+ )
364
+ context = {"client_session": self, "message": "Unclosed client session"}
365
+ if self._source_traceback is not None:
366
+ context["source_traceback"] = self._source_traceback
367
+ self._loop.call_exception_handler(context)
368
+
369
+ def request(
370
+ self, method: str, url: StrOrURL, **kwargs: Any
371
+ ) -> "_RequestContextManager":
372
+ """Perform HTTP request."""
373
+ return _RequestContextManager(self._request(method, url, **kwargs))
374
+
375
+ def _build_url(self, str_or_url: StrOrURL) -> URL:
376
+ url = URL(str_or_url)
377
+ if self._base_url is None:
378
+ return url
379
+ else:
380
+ assert not url.is_absolute() and url.path.startswith("/")
381
+ return self._base_url.join(url)
382
+
383
    async def _request(
        self,
        method: str,
        str_or_url: StrOrURL,
        *,
        params: Optional[Mapping[str, str]] = None,
        data: Any = None,
        json: Any = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        allow_redirects: bool = True,
        max_redirects: int = 10,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        raise_for_status: Union[
            None, bool, Callable[[ClientResponse], Awaitable[None]]
        ] = None,
        read_until_eof: bool = True,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        server_hostname: Optional[str] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        trace_request_ctx: Optional[SimpleNamespace] = None,
        read_bufsize: Optional[int] = None,
        auto_decompress: Optional[bool] = None,
        max_line_size: Optional[int] = None,
        max_field_size: Optional[int] = None,
    ) -> ClientResponse:
        """Core request driver: build the request, acquire a connection,
        send it, follow redirects, and return the started ClientResponse.

        Session-level defaults fill any per-request option left unset.
        """

        # NOTE: timeout clamps existing connect and read timeouts. We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError("Session is closed")

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        if data is not None and json is not None:
            raise ValueError(
                "data and json parameters can not be used at the same time"
            )
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

        redirects = 0
        history = []
        version = self._version
        params = params or {}

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = self._build_url(str_or_url)
        except ValueError as e:
            raise InvalidURL(str_or_url) from e

        # Per-request skip set extends the session-wide one.
        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError as e:
                raise InvalidURL(proxy) from e

        if timeout is sentinel:
            real_timeout: ClientTimeout = self._timeout
        else:
            if not isinstance(timeout, ClientTimeout):
                real_timeout = ClientTimeout(total=timeout)
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
        )
        handle = tm.start()

        if read_bufsize is None:
            read_bufsize = self._read_bufsize

        if auto_decompress is None:
            auto_decompress = self._auto_decompress

        if max_line_size is None:
            max_line_size = self._max_line_size

        if max_field_size is None:
            max_field_size = self._max_field_size

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
            )
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(method, url.update_query(params), headers)

        timer = tm.timer()
        try:
            with timer:
                # One iteration per request attempt; redirects `continue`.
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError(
                            "Cannot combine AUTH argument with "
                            "credentials encoded in URL"
                        )

                    # Auth precedence: explicit arg, then URL, then default.
                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (
                        headers is not None
                        and auth is not None
                        and hdrs.AUTHORIZATION in headers
                    ):
                        raise ValueError(
                            "Cannot combine AUTHORIZATION header "
                            "with AUTH argument or credentials "
                            "encoded in URL"
                        )

                    all_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        # Per-request cookies go through a throwaway jar so
                        # they are filtered like stored ones but not kept.
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        with suppress(LookupError):
                            proxy, proxy_auth = get_env_proxy_for_url(url)

                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=all_cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl if ssl is not None else True,
                        server_hostname=server_hostname,
                        proxy_headers=proxy_headers,
                        traces=traces,
                        trust_env=self.trust_env,
                    )

                    # connection timeout
                    try:
                        async with ceil_timeout(
                            real_timeout.connect,
                            ceil_threshold=real_timeout.ceil_threshold,
                        ):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req, traces=traces, timeout=real_timeout
                            )
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            "Connection timeout " "to host {}".format(url)
                        ) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method_must_be_empty_body(method),
                        read_until_eof=read_until_eof,
                        auto_decompress=auto_decompress,
                        read_timeout=real_timeout.sock_read,
                        read_bufsize=read_bufsize,
                        timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
                        max_line_size=max_line_size,
                        max_field_size=max_field_size,
                    )

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            # Any failure here poisons the connection.
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                            raise
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url.update_query(params), headers, resp
                            )

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(
                                history[0].request_info, tuple(history)
                            )

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
                        ):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
                            hdrs.URI
                        )
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()

                        try:
                            parsed_url = URL(
                                r_url, encoded=not self._requote_redirect_url
                            )

                        except ValueError as e:
                            raise InvalidURL(r_url) from e

                        scheme = parsed_url.scheme
                        if scheme not in ("http", "https", ""):
                            resp.close()
                            raise ValueError("Can redirect only to http or https")
                        elif not scheme:
                            parsed_url = url.join(parsed_url)

                        # Drop credentials when the redirect crosses origins.
                        if url.origin() != parsed_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = parsed_url
                        params = {}
                        resp.release()
                        continue

                    break

            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status

            if raise_for_status is None:
                pass
            elif callable(raise_for_status):
                await raise_for_status(resp)
            elif raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(
                    method, url.update_query(params), headers, resp
                )
            return resp

        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(
                    method, url.update_query(params), headers, e
                )
            raise
722
+
723
    def ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = True,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> "_WSRequestContextManager":
        """Initiate websocket connection."""
        # Thin wrapper: all arguments are forwarded unchanged to _ws_connect,
        # wrapped in a context manager so `async with` closes the socket.
        return _WSRequestContextManager(
            self._ws_connect(
                url,
                method=method,
                protocols=protocols,
                timeout=timeout,
                receive_timeout=receive_timeout,
                autoclose=autoclose,
                autoping=autoping,
                heartbeat=heartbeat,
                auth=auth,
                origin=origin,
                params=params,
                headers=headers,
                proxy=proxy,
                proxy_auth=proxy_auth,
                ssl=ssl,
                verify_ssl=verify_ssl,
                fingerprint=fingerprint,
                ssl_context=ssl_context,
                proxy_headers=proxy_headers,
                compress=compress,
                max_msg_size=max_msg_size,
            )
        )
774
+
775
+ async def _ws_connect(
776
+ self,
777
+ url: StrOrURL,
778
+ *,
779
+ method: str = hdrs.METH_GET,
780
+ protocols: Iterable[str] = (),
781
+ timeout: float = 10.0,
782
+ receive_timeout: Optional[float] = None,
783
+ autoclose: bool = True,
784
+ autoping: bool = True,
785
+ heartbeat: Optional[float] = None,
786
+ auth: Optional[BasicAuth] = None,
787
+ origin: Optional[str] = None,
788
+ params: Optional[Mapping[str, str]] = None,
789
+ headers: Optional[LooseHeaders] = None,
790
+ proxy: Optional[StrOrURL] = None,
791
+ proxy_auth: Optional[BasicAuth] = None,
792
+ ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True,
793
+ verify_ssl: Optional[bool] = None,
794
+ fingerprint: Optional[bytes] = None,
795
+ ssl_context: Optional[SSLContext] = None,
796
+ proxy_headers: Optional[LooseHeaders] = None,
797
+ compress: int = 0,
798
+ max_msg_size: int = 4 * 1024 * 1024,
799
+ ) -> ClientWebSocketResponse:
800
+
801
+ if headers is None:
802
+ real_headers: CIMultiDict[str] = CIMultiDict()
803
+ else:
804
+ real_headers = CIMultiDict(headers)
805
+
806
+ default_headers = {
807
+ hdrs.UPGRADE: "websocket",
808
+ hdrs.CONNECTION: "Upgrade",
809
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
810
+ }
811
+
812
+ for key, value in default_headers.items():
813
+ real_headers.setdefault(key, value)
814
+
815
+ sec_key = base64.b64encode(os.urandom(16))
816
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
817
+
818
+ if protocols:
819
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
820
+ if origin is not None:
821
+ real_headers[hdrs.ORIGIN] = origin
822
+ if compress:
823
+ extstr = ws_ext_gen(compress=compress)
824
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
825
+
826
+ # For the sake of backward compatibility, if user passes in None, convert it to True
827
+ if ssl is None:
828
+ ssl = True
829
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
830
+
831
+ # send request
832
+ resp = await self.request(
833
+ method,
834
+ url,
835
+ params=params,
836
+ headers=real_headers,
837
+ read_until_eof=False,
838
+ auth=auth,
839
+ proxy=proxy,
840
+ proxy_auth=proxy_auth,
841
+ ssl=ssl,
842
+ proxy_headers=proxy_headers,
843
+ )
844
+
845
+ try:
846
+ # check handshake
847
+ if resp.status != 101:
848
+ raise WSServerHandshakeError(
849
+ resp.request_info,
850
+ resp.history,
851
+ message="Invalid response status",
852
+ status=resp.status,
853
+ headers=resp.headers,
854
+ )
855
+
856
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
857
+ raise WSServerHandshakeError(
858
+ resp.request_info,
859
+ resp.history,
860
+ message="Invalid upgrade header",
861
+ status=resp.status,
862
+ headers=resp.headers,
863
+ )
864
+
865
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
866
+ raise WSServerHandshakeError(
867
+ resp.request_info,
868
+ resp.history,
869
+ message="Invalid connection header",
870
+ status=resp.status,
871
+ headers=resp.headers,
872
+ )
873
+
874
+ # key calculation
875
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
876
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
877
+ if r_key != match:
878
+ raise WSServerHandshakeError(
879
+ resp.request_info,
880
+ resp.history,
881
+ message="Invalid challenge response",
882
+ status=resp.status,
883
+ headers=resp.headers,
884
+ )
885
+
886
+ # websocket protocol
887
+ protocol = None
888
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
889
+ resp_protocols = [
890
+ proto.strip()
891
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
892
+ ]
893
+
894
+ for proto in resp_protocols:
895
+ if proto in protocols:
896
+ protocol = proto
897
+ break
898
+
899
+ # websocket compress
900
+ notakeover = False
901
+ if compress:
902
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
903
+ if compress_hdrs:
904
+ try:
905
+ compress, notakeover = ws_ext_parse(compress_hdrs)
906
+ except WSHandshakeError as exc:
907
+ raise WSServerHandshakeError(
908
+ resp.request_info,
909
+ resp.history,
910
+ message=exc.args[0],
911
+ status=resp.status,
912
+ headers=resp.headers,
913
+ ) from exc
914
+ else:
915
+ compress = 0
916
+ notakeover = False
917
+
918
+ conn = resp.connection
919
+ assert conn is not None
920
+ conn_proto = conn.protocol
921
+ assert conn_proto is not None
922
+ transport = conn.transport
923
+ assert transport is not None
924
+ reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
925
+ conn_proto, 2**16, loop=self._loop
926
+ )
927
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
928
+ writer = WebSocketWriter(
929
+ conn_proto,
930
+ transport,
931
+ use_mask=True,
932
+ compress=compress,
933
+ notakeover=notakeover,
934
+ )
935
+ except BaseException:
936
+ resp.close()
937
+ raise
938
+ else:
939
+ return self._ws_response_class(
940
+ reader,
941
+ writer,
942
+ protocol,
943
+ resp,
944
+ timeout,
945
+ autoclose,
946
+ autoping,
947
+ self._loop,
948
+ receive_timeout=receive_timeout,
949
+ heartbeat=heartbeat,
950
+ compress=compress,
951
+ client_notakeover=notakeover,
952
+ )
953
+
954
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
955
+ """Add default headers and transform it to CIMultiDict"""
956
+ # Convert headers to MultiDict
957
+ result = CIMultiDict(self._default_headers)
958
+ if headers:
959
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
960
+ headers = CIMultiDict(headers)
961
+ added_names: Set[str] = set()
962
+ for key, value in headers.items():
963
+ if key in added_names:
964
+ result.add(key, value)
965
+ else:
966
+ result[key] = value
967
+ added_names.add(key)
968
+ return result
969
+
970
+ def get(
971
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
972
+ ) -> "_RequestContextManager":
973
+ """Perform HTTP GET request."""
974
+ return _RequestContextManager(
975
+ self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
976
+ )
977
+
978
+ def options(
979
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
980
+ ) -> "_RequestContextManager":
981
+ """Perform HTTP OPTIONS request."""
982
+ return _RequestContextManager(
983
+ self._request(
984
+ hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
985
+ )
986
+ )
987
+
988
+ def head(
989
+ self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
990
+ ) -> "_RequestContextManager":
991
+ """Perform HTTP HEAD request."""
992
+ return _RequestContextManager(
993
+ self._request(
994
+ hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
995
+ )
996
+ )
997
+
998
+ def post(
999
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1000
+ ) -> "_RequestContextManager":
1001
+ """Perform HTTP POST request."""
1002
+ return _RequestContextManager(
1003
+ self._request(hdrs.METH_POST, url, data=data, **kwargs)
1004
+ )
1005
+
1006
+ def put(
1007
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1008
+ ) -> "_RequestContextManager":
1009
+ """Perform HTTP PUT request."""
1010
+ return _RequestContextManager(
1011
+ self._request(hdrs.METH_PUT, url, data=data, **kwargs)
1012
+ )
1013
+
1014
+ def patch(
1015
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1016
+ ) -> "_RequestContextManager":
1017
+ """Perform HTTP PATCH request."""
1018
+ return _RequestContextManager(
1019
+ self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
1020
+ )
1021
+
1022
+ def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
1023
+ """Perform HTTP DELETE request."""
1024
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
1025
+
1026
+ async def close(self) -> None:
1027
+ """Close underlying connector.
1028
+
1029
+ Release all acquired resources.
1030
+ """
1031
+ if not self.closed:
1032
+ if self._connector is not None and self._connector_owner:
1033
+ await self._connector.close()
1034
+ self._connector = None
1035
+
1036
+ @property
1037
+ def closed(self) -> bool:
1038
+ """Is client session closed.
1039
+
1040
+ A readonly property.
1041
+ """
1042
+ return self._connector is None or self._connector.closed
1043
+
1044
+ @property
1045
+ def connector(self) -> Optional[BaseConnector]:
1046
+ """Connector instance used for the session."""
1047
+ return self._connector
1048
+
1049
+ @property
1050
+ def cookie_jar(self) -> AbstractCookieJar:
1051
+ """The session cookies."""
1052
+ return self._cookie_jar
1053
+
1054
+ @property
1055
+ def version(self) -> Tuple[int, int]:
1056
+ """The session HTTP protocol version."""
1057
+ return self._version
1058
+
1059
+ @property
1060
+ def requote_redirect_url(self) -> bool:
1061
+ """Do URL requoting on redirection handling."""
1062
+ return self._requote_redirect_url
1063
+
1064
+ @requote_redirect_url.setter
1065
+ def requote_redirect_url(self, val: bool) -> None:
1066
+ """Do URL requoting on redirection handling."""
1067
+ warnings.warn(
1068
+ "session.requote_redirect_url modification " "is deprecated #2778",
1069
+ DeprecationWarning,
1070
+ stacklevel=2,
1071
+ )
1072
+ self._requote_redirect_url = val
1073
+
1074
+ @property
1075
+ def loop(self) -> asyncio.AbstractEventLoop:
1076
+ """Session's loop."""
1077
+ warnings.warn(
1078
+ "client.loop property is deprecated", DeprecationWarning, stacklevel=2
1079
+ )
1080
+ return self._loop
1081
+
1082
+ @property
1083
+ def timeout(self) -> ClientTimeout:
1084
+ """Timeout for the session."""
1085
+ return self._timeout
1086
+
1087
+ @property
1088
+ def headers(self) -> "CIMultiDict[str]":
1089
+ """The default headers of the client session."""
1090
+ return self._default_headers
1091
+
1092
+ @property
1093
+ def skip_auto_headers(self) -> FrozenSet[istr]:
1094
+ """Headers for which autogeneration should be skipped"""
1095
+ return self._skip_auto_headers
1096
+
1097
+ @property
1098
+ def auth(self) -> Optional[BasicAuth]:
1099
+ """An object that represents HTTP Basic Authorization"""
1100
+ return self._default_auth
1101
+
1102
+ @property
1103
+ def json_serialize(self) -> JSONEncoder:
1104
+ """Json serializer callable"""
1105
+ return self._json_serialize
1106
+
1107
+ @property
1108
+ def connector_owner(self) -> bool:
1109
+ """Should connector be closed on session closing"""
1110
+ return self._connector_owner
1111
+
1112
+ @property
1113
+ def raise_for_status(
1114
+ self,
1115
+ ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
1116
+ """Should `ClientResponse.raise_for_status()` be called for each response."""
1117
+ return self._raise_for_status
1118
+
1119
+ @property
1120
+ def auto_decompress(self) -> bool:
1121
+ """Should the body response be automatically decompressed."""
1122
+ return self._auto_decompress
1123
+
1124
+ @property
1125
+ def trust_env(self) -> bool:
1126
+ """
1127
+ Should proxies information from environment or netrc be trusted.
1128
+
1129
+ Information is from HTTP_PROXY / HTTPS_PROXY environment variables
1130
+ or ~/.netrc file if present.
1131
+ """
1132
+ return self._trust_env
1133
+
1134
+ @property
1135
+ def trace_configs(self) -> List[TraceConfig]:
1136
+ """A list of TraceConfig instances used for client tracing"""
1137
+ return self._trace_configs
1138
+
1139
+ def detach(self) -> None:
1140
+ """Detach connector from session without closing the former.
1141
+
1142
+ Session is switched to closed state anyway.
1143
+ """
1144
+ self._connector = None
1145
+
1146
+ def __enter__(self) -> None:
1147
+ raise TypeError("Use async with instead")
1148
+
1149
+ def __exit__(
1150
+ self,
1151
+ exc_type: Optional[Type[BaseException]],
1152
+ exc_val: Optional[BaseException],
1153
+ exc_tb: Optional[TracebackType],
1154
+ ) -> None:
1155
+ # __exit__ should exist in pair with __enter__ but never executed
1156
+ pass # pragma: no cover
1157
+
1158
+ async def __aenter__(self) -> "ClientSession":
1159
+ return self
1160
+
1161
+ async def __aexit__(
1162
+ self,
1163
+ exc_type: Optional[Type[BaseException]],
1164
+ exc_val: Optional[BaseException],
1165
+ exc_tb: Optional[TracebackType],
1166
+ ) -> None:
1167
+ await self.close()
1168
+
1169
+
1170
+ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
1171
+
1172
+ __slots__ = ("_coro", "_resp")
1173
+
1174
+ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
1175
+ self._coro = coro
1176
+
1177
+ def send(self, arg: None) -> "asyncio.Future[Any]":
1178
+ return self._coro.send(arg)
1179
+
1180
+ def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
1181
+ return self._coro.throw(*args, **kwargs)
1182
+
1183
+ def close(self) -> None:
1184
+ return self._coro.close()
1185
+
1186
+ def __await__(self) -> Generator[Any, None, _RetType]:
1187
+ ret = self._coro.__await__()
1188
+ return ret
1189
+
1190
+ def __iter__(self) -> Generator[Any, None, _RetType]:
1191
+ return self.__await__()
1192
+
1193
+ async def __aenter__(self) -> _RetType:
1194
+ self._resp = await self._coro
1195
+ return self._resp
1196
+
1197
+
1198
+ class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
1199
+ __slots__ = ()
1200
+
1201
+ async def __aexit__(
1202
+ self,
1203
+ exc_type: Optional[Type[BaseException]],
1204
+ exc: Optional[BaseException],
1205
+ tb: Optional[TracebackType],
1206
+ ) -> None:
1207
+ # We're basing behavior on the exception as it can be caused by
1208
+ # user code unrelated to the status of the connection. If you
1209
+ # would like to close a connection you must do that
1210
+ # explicitly. Otherwise connection error handling should kick in
1211
+ # and close/recycle the connection as required.
1212
+ self._resp.release()
1213
+ await self._resp.wait_for_close()
1214
+
1215
+
1216
+ class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
1217
+ __slots__ = ()
1218
+
1219
+ async def __aexit__(
1220
+ self,
1221
+ exc_type: Optional[Type[BaseException]],
1222
+ exc: Optional[BaseException],
1223
+ tb: Optional[TracebackType],
1224
+ ) -> None:
1225
+ await self._resp.close()
1226
+
1227
+
1228
+ class _SessionRequestContextManager:
1229
+
1230
+ __slots__ = ("_coro", "_resp", "_session")
1231
+
1232
+ def __init__(
1233
+ self,
1234
+ coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
1235
+ session: ClientSession,
1236
+ ) -> None:
1237
+ self._coro = coro
1238
+ self._resp: Optional[ClientResponse] = None
1239
+ self._session = session
1240
+
1241
+ async def __aenter__(self) -> ClientResponse:
1242
+ try:
1243
+ self._resp = await self._coro
1244
+ except BaseException:
1245
+ await self._session.close()
1246
+ raise
1247
+ else:
1248
+ return self._resp
1249
+
1250
+ async def __aexit__(
1251
+ self,
1252
+ exc_type: Optional[Type[BaseException]],
1253
+ exc: Optional[BaseException],
1254
+ tb: Optional[TracebackType],
1255
+ ) -> None:
1256
+ assert self._resp is not None
1257
+ self._resp.close()
1258
+ await self._session.close()
1259
+
1260
+
1261
+ def request(
1262
+ method: str,
1263
+ url: StrOrURL,
1264
+ *,
1265
+ params: Optional[Mapping[str, str]] = None,
1266
+ data: Any = None,
1267
+ json: Any = None,
1268
+ headers: Optional[LooseHeaders] = None,
1269
+ skip_auto_headers: Optional[Iterable[str]] = None,
1270
+ auth: Optional[BasicAuth] = None,
1271
+ allow_redirects: bool = True,
1272
+ max_redirects: int = 10,
1273
+ compress: Optional[str] = None,
1274
+ chunked: Optional[bool] = None,
1275
+ expect100: bool = False,
1276
+ raise_for_status: Optional[bool] = None,
1277
+ read_until_eof: bool = True,
1278
+ proxy: Optional[StrOrURL] = None,
1279
+ proxy_auth: Optional[BasicAuth] = None,
1280
+ timeout: Union[ClientTimeout, object] = sentinel,
1281
+ cookies: Optional[LooseCookies] = None,
1282
+ version: HttpVersion = http.HttpVersion11,
1283
+ connector: Optional[BaseConnector] = None,
1284
+ read_bufsize: Optional[int] = None,
1285
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1286
+ max_line_size: int = 8190,
1287
+ max_field_size: int = 8190,
1288
+ ) -> _SessionRequestContextManager:
1289
+ """Constructs and sends a request.
1290
+
1291
+ Returns response object.
1292
+ method - HTTP method
1293
+ url - request url
1294
+ params - (optional) Dictionary or bytes to be sent in the query
1295
+ string of the new request
1296
+ data - (optional) Dictionary, bytes, or file-like object to
1297
+ send in the body of the request
1298
+ json - (optional) Any json compatible python object
1299
+ headers - (optional) Dictionary of HTTP Headers to send with
1300
+ the request
1301
+ cookies - (optional) Dict object to send with the request
1302
+ auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
1303
+ auth - aiohttp.helpers.BasicAuth
1304
+ allow_redirects - (optional) If set to False, do not follow
1305
+ redirects
1306
+ version - Request HTTP version.
1307
+ compress - Set to True if request has to be compressed
1308
+ with deflate encoding.
1309
+ chunked - Set to chunk size for chunked transfer encoding.
1310
+ expect100 - Expect 100-continue response from server.
1311
+ connector - BaseConnector sub-class instance to support
1312
+ connection pooling.
1313
+ read_until_eof - Read response until eof if response
1314
+ does not have Content-Length header.
1315
+ loop - Optional event loop.
1316
+ timeout - Optional ClientTimeout settings structure, 5min
1317
+ total timeout by default.
1318
+ Usage::
1319
+ >>> import aiohttp
1320
+ >>> resp = await aiohttp.request('GET', 'http://python.org/')
1321
+ >>> resp
1322
+ <ClientResponse(python.org/) [200]>
1323
+ >>> data = await resp.read()
1324
+ """
1325
+ connector_owner = False
1326
+ if connector is None:
1327
+ connector_owner = True
1328
+ connector = TCPConnector(loop=loop, force_close=True)
1329
+
1330
+ session = ClientSession(
1331
+ loop=loop,
1332
+ cookies=cookies,
1333
+ version=version,
1334
+ timeout=timeout,
1335
+ connector=connector,
1336
+ connector_owner=connector_owner,
1337
+ )
1338
+
1339
+ return _SessionRequestContextManager(
1340
+ session._request(
1341
+ method,
1342
+ url,
1343
+ params=params,
1344
+ data=data,
1345
+ json=json,
1346
+ headers=headers,
1347
+ skip_auto_headers=skip_auto_headers,
1348
+ auth=auth,
1349
+ allow_redirects=allow_redirects,
1350
+ max_redirects=max_redirects,
1351
+ compress=compress,
1352
+ chunked=chunked,
1353
+ expect100=expect100,
1354
+ raise_for_status=raise_for_status,
1355
+ read_until_eof=read_until_eof,
1356
+ proxy=proxy,
1357
+ proxy_auth=proxy_auth,
1358
+ read_bufsize=read_bufsize,
1359
+ max_line_size=max_line_size,
1360
+ max_field_size=max_field_size,
1361
+ ),
1362
+ session,
1363
+ )
env-llmeval/lib/python3.10/site-packages/aiohttp/client_exceptions.py ADDED
@@ -0,0 +1,346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP related errors."""
2
+
3
+ import asyncio
4
+ import warnings
5
+ from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
6
+
7
+ from .http_parser import RawResponseMessage
8
+ from .typedefs import LooseHeaders
9
+
10
+ try:
11
+ import ssl
12
+
13
+ SSLContext = ssl.SSLContext
14
+ except ImportError: # pragma: no cover
15
+ ssl = SSLContext = None # type: ignore[assignment]
16
+
17
+
18
+ if TYPE_CHECKING:
19
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
20
+ else:
21
+ RequestInfo = ClientResponse = ConnectionKey = None
22
+
23
+ __all__ = (
24
+ "ClientError",
25
+ "ClientConnectionError",
26
+ "ClientOSError",
27
+ "ClientConnectorError",
28
+ "ClientProxyConnectionError",
29
+ "ClientSSLError",
30
+ "ClientConnectorSSLError",
31
+ "ClientConnectorCertificateError",
32
+ "ServerConnectionError",
33
+ "ServerTimeoutError",
34
+ "ServerDisconnectedError",
35
+ "ServerFingerprintMismatch",
36
+ "ClientResponseError",
37
+ "ClientHttpProxyError",
38
+ "WSServerHandshakeError",
39
+ "ContentTypeError",
40
+ "ClientPayloadError",
41
+ "InvalidURL",
42
+ )
43
+
44
+
45
+ class ClientError(Exception):
46
+ """Base class for client connection errors."""
47
+
48
+
49
+ class ClientResponseError(ClientError):
50
+ """Base class for exceptions that occur after getting a response.
51
+
52
+ request_info: An instance of RequestInfo.
53
+ history: A sequence of responses, if redirects occurred.
54
+ status: HTTP status code.
55
+ message: Error message.
56
+ headers: Response headers.
57
+ """
58
+
59
+ def __init__(
60
+ self,
61
+ request_info: RequestInfo,
62
+ history: Tuple[ClientResponse, ...],
63
+ *,
64
+ code: Optional[int] = None,
65
+ status: Optional[int] = None,
66
+ message: str = "",
67
+ headers: Optional[LooseHeaders] = None,
68
+ ) -> None:
69
+ self.request_info = request_info
70
+ if code is not None:
71
+ if status is not None:
72
+ raise ValueError(
73
+ "Both code and status arguments are provided; "
74
+ "code is deprecated, use status instead"
75
+ )
76
+ warnings.warn(
77
+ "code argument is deprecated, use status instead",
78
+ DeprecationWarning,
79
+ stacklevel=2,
80
+ )
81
+ if status is not None:
82
+ self.status = status
83
+ elif code is not None:
84
+ self.status = code
85
+ else:
86
+ self.status = 0
87
+ self.message = message
88
+ self.headers = headers
89
+ self.history = history
90
+ self.args = (request_info, history)
91
+
92
+ def __str__(self) -> str:
93
+ return "{}, message={!r}, url={!r}".format(
94
+ self.status,
95
+ self.message,
96
+ self.request_info.real_url,
97
+ )
98
+
99
+ def __repr__(self) -> str:
100
+ args = f"{self.request_info!r}, {self.history!r}"
101
+ if self.status != 0:
102
+ args += f", status={self.status!r}"
103
+ if self.message != "":
104
+ args += f", message={self.message!r}"
105
+ if self.headers is not None:
106
+ args += f", headers={self.headers!r}"
107
+ return f"{type(self).__name__}({args})"
108
+
109
+ @property
110
+ def code(self) -> int:
111
+ warnings.warn(
112
+ "code property is deprecated, use status instead",
113
+ DeprecationWarning,
114
+ stacklevel=2,
115
+ )
116
+ return self.status
117
+
118
+ @code.setter
119
+ def code(self, value: int) -> None:
120
+ warnings.warn(
121
+ "code property is deprecated, use status instead",
122
+ DeprecationWarning,
123
+ stacklevel=2,
124
+ )
125
+ self.status = value
126
+
127
+
128
+ class ContentTypeError(ClientResponseError):
129
+ """ContentType found is not valid."""
130
+
131
+
132
+ class WSServerHandshakeError(ClientResponseError):
133
+ """websocket server handshake error."""
134
+
135
+
136
+ class ClientHttpProxyError(ClientResponseError):
137
+ """HTTP proxy error.
138
+
139
+ Raised in :class:`aiohttp.connector.TCPConnector` if
140
+ proxy responds with status other than ``200 OK``
141
+ on ``CONNECT`` request.
142
+ """
143
+
144
+
145
+ class TooManyRedirects(ClientResponseError):
146
+ """Client was redirected too many times."""
147
+
148
+
149
+ class ClientConnectionError(ClientError):
150
+ """Base class for client socket errors."""
151
+
152
+
153
+ class ClientOSError(ClientConnectionError, OSError):
154
+ """OSError error."""
155
+
156
+
157
+ class ClientConnectorError(ClientOSError):
158
+ """Client connector error.
159
+
160
+ Raised in :class:`aiohttp.connector.TCPConnector` if
161
+ a connection can not be established.
162
+ """
163
+
164
+ def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
165
+ self._conn_key = connection_key
166
+ self._os_error = os_error
167
+ super().__init__(os_error.errno, os_error.strerror)
168
+ self.args = (connection_key, os_error)
169
+
170
+ @property
171
+ def os_error(self) -> OSError:
172
+ return self._os_error
173
+
174
+ @property
175
+ def host(self) -> str:
176
+ return self._conn_key.host
177
+
178
+ @property
179
+ def port(self) -> Optional[int]:
180
+ return self._conn_key.port
181
+
182
+ @property
183
+ def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
184
+ return self._conn_key.ssl
185
+
186
+ def __str__(self) -> str:
187
+ return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
188
+ self, "default" if self.ssl is True else self.ssl, self.strerror
189
+ )
190
+
191
+ # OSError.__reduce__ does too much black magick
192
+ __reduce__ = BaseException.__reduce__
193
+
194
+
195
+ class ClientProxyConnectionError(ClientConnectorError):
196
+ """Proxy connection error.
197
+
198
+ Raised in :class:`aiohttp.connector.TCPConnector` if
199
+ connection to proxy can not be established.
200
+ """
201
+
202
+
203
+ class UnixClientConnectorError(ClientConnectorError):
204
+ """Unix connector error.
205
+
206
+ Raised in :py:class:`aiohttp.connector.UnixConnector`
207
+ if connection to unix socket can not be established.
208
+ """
209
+
210
+ def __init__(
211
+ self, path: str, connection_key: ConnectionKey, os_error: OSError
212
+ ) -> None:
213
+ self._path = path
214
+ super().__init__(connection_key, os_error)
215
+
216
+ @property
217
+ def path(self) -> str:
218
+ return self._path
219
+
220
+ def __str__(self) -> str:
221
+ return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
222
+ self, "default" if self.ssl is True else self.ssl, self.strerror
223
+ )
224
+
225
+
226
+ class ServerConnectionError(ClientConnectionError):
227
+ """Server connection errors."""
228
+
229
+
230
+ class ServerDisconnectedError(ServerConnectionError):
231
+ """Server disconnected."""
232
+
233
+ def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
234
+ if message is None:
235
+ message = "Server disconnected"
236
+
237
+ self.args = (message,)
238
+ self.message = message
239
+
240
+
241
+ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
242
+ """Server timeout error."""
243
+
244
+
245
+ class ServerFingerprintMismatch(ServerConnectionError):
246
+ """SSL certificate does not match expected fingerprint."""
247
+
248
+ def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
249
+ self.expected = expected
250
+ self.got = got
251
+ self.host = host
252
+ self.port = port
253
+ self.args = (expected, got, host, port)
254
+
255
+ def __repr__(self) -> str:
256
+ return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
257
+ self.__class__.__name__, self.expected, self.got, self.host, self.port
258
+ )
259
+
260
+
261
+ class ClientPayloadError(ClientError):
262
+ """Response payload error."""
263
+
264
+
265
+ class InvalidURL(ClientError, ValueError):
266
+ """Invalid URL.
267
+
268
+ URL used for fetching is malformed, e.g. it doesn't contains host
269
+ part.
270
+ """
271
+
272
+ # Derive from ValueError for backward compatibility
273
+
274
+ def __init__(self, url: Any) -> None:
275
+ # The type of url is not yarl.URL because the exception can be raised
276
+ # on URL(url) call
277
+ super().__init__(url)
278
+
279
+ @property
280
+ def url(self) -> Any:
281
+ return self.args[0]
282
+
283
+ def __repr__(self) -> str:
284
+ return f"<{self.__class__.__name__} {self.url}>"
285
+
286
+
287
+ class ClientSSLError(ClientConnectorError):
288
+ """Base error for ssl.*Errors."""
289
+
290
+
291
+ if ssl is not None:
292
+ cert_errors = (ssl.CertificateError,)
293
+ cert_errors_bases = (
294
+ ClientSSLError,
295
+ ssl.CertificateError,
296
+ )
297
+
298
+ ssl_errors = (ssl.SSLError,)
299
+ ssl_error_bases = (ClientSSLError, ssl.SSLError)
300
+ else: # pragma: no cover
301
+ cert_errors = tuple()
302
+ cert_errors_bases = (
303
+ ClientSSLError,
304
+ ValueError,
305
+ )
306
+
307
+ ssl_errors = tuple()
308
+ ssl_error_bases = (ClientSSLError,)
309
+
310
+
311
+ class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
312
+ """Response ssl error."""
313
+
314
+
315
+ class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
316
+ """Response certificate error."""
317
+
318
+ def __init__(
319
+ self, connection_key: ConnectionKey, certificate_error: Exception
320
+ ) -> None:
321
+ self._conn_key = connection_key
322
+ self._certificate_error = certificate_error
323
+ self.args = (connection_key, certificate_error)
324
+
325
+ @property
326
+ def certificate_error(self) -> Exception:
327
+ return self._certificate_error
328
+
329
+ @property
330
+ def host(self) -> str:
331
+ return self._conn_key.host
332
+
333
+ @property
334
+ def port(self) -> Optional[int]:
335
+ return self._conn_key.port
336
+
337
+ @property
338
+ def ssl(self) -> bool:
339
+ return self._conn_key.is_ssl
340
+
341
+ def __str__(self) -> str:
342
+ return (
343
+ "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
344
+ "[{0.certificate_error.__class__.__name__}: "
345
+ "{0.certificate_error.args}]".format(self)
346
+ )
env-llmeval/lib/python3.10/site-packages/aiohttp/client_proto.py ADDED
@@ -0,0 +1,296 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from contextlib import suppress
3
+ from typing import Any, Optional, Tuple
4
+
5
+ from .base_protocol import BaseProtocol
6
+ from .client_exceptions import (
7
+ ClientOSError,
8
+ ClientPayloadError,
9
+ ServerDisconnectedError,
10
+ ServerTimeoutError,
11
+ )
12
+ from .helpers import (
13
+ _EXC_SENTINEL,
14
+ BaseTimerContext,
15
+ set_exception,
16
+ status_code_must_be_empty_body,
17
+ )
18
+ from .http import HttpResponseParser, RawResponseMessage
19
+ from .http_exceptions import HttpProcessingError
20
+ from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
21
+
22
+
23
+ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
24
+ """Helper class to adapt between Protocol and StreamReader."""
25
+
26
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
27
+ BaseProtocol.__init__(self, loop=loop)
28
+ DataQueue.__init__(self, loop)
29
+
30
+ self._should_close = False
31
+
32
+ self._payload: Optional[StreamReader] = None
33
+ self._skip_payload = False
34
+ self._payload_parser = None
35
+
36
+ self._timer = None
37
+
38
+ self._tail = b""
39
+ self._upgraded = False
40
+ self._parser: Optional[HttpResponseParser] = None
41
+
42
+ self._read_timeout: Optional[float] = None
43
+ self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
44
+
45
+ self._timeout_ceil_threshold: Optional[float] = 5
46
+
47
+ @property
48
+ def upgraded(self) -> bool:
49
+ return self._upgraded
50
+
51
+ @property
52
+ def should_close(self) -> bool:
53
+ if self._payload is not None and not self._payload.is_eof() or self._upgraded:
54
+ return True
55
+
56
+ return (
57
+ self._should_close
58
+ or self._upgraded
59
+ or self.exception() is not None
60
+ or self._payload_parser is not None
61
+ or len(self) > 0
62
+ or bool(self._tail)
63
+ )
64
+
65
+ def force_close(self) -> None:
66
+ self._should_close = True
67
+
68
+ def close(self) -> None:
69
+ transport = self.transport
70
+ if transport is not None:
71
+ transport.close()
72
+ self.transport = None
73
+ self._payload = None
74
+ self._drop_timeout()
75
+
76
+ def is_connected(self) -> bool:
77
+ return self.transport is not None and not self.transport.is_closing()
78
+
79
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
80
+ self._drop_timeout()
81
+
82
+ original_connection_error = exc
83
+ reraised_exc = original_connection_error
84
+
85
+ connection_closed_cleanly = original_connection_error is None
86
+
87
+ if self._payload_parser is not None:
88
+ with suppress(Exception): # FIXME: log this somehow?
89
+ self._payload_parser.feed_eof()
90
+
91
+ uncompleted = None
92
+ if self._parser is not None:
93
+ try:
94
+ uncompleted = self._parser.feed_eof()
95
+ except Exception as underlying_exc:
96
+ if self._payload is not None:
97
+ client_payload_exc_msg = (
98
+ f"Response payload is not completed: {underlying_exc !r}"
99
+ )
100
+ if not connection_closed_cleanly:
101
+ client_payload_exc_msg = (
102
+ f"{client_payload_exc_msg !s}. "
103
+ f"{original_connection_error !r}"
104
+ )
105
+ set_exception(
106
+ self._payload,
107
+ ClientPayloadError(client_payload_exc_msg),
108
+ underlying_exc,
109
+ )
110
+
111
+ if not self.is_eof():
112
+ if isinstance(original_connection_error, OSError):
113
+ reraised_exc = ClientOSError(*original_connection_error.args)
114
+ if connection_closed_cleanly:
115
+ reraised_exc = ServerDisconnectedError(uncompleted)
116
+ # assigns self._should_close to True as side effect,
117
+ # we do it anyway below
118
+ underlying_non_eof_exc = (
119
+ _EXC_SENTINEL
120
+ if connection_closed_cleanly
121
+ else original_connection_error
122
+ )
123
+ assert underlying_non_eof_exc is not None
124
+ assert reraised_exc is not None
125
+ self.set_exception(reraised_exc, underlying_non_eof_exc)
126
+
127
+ self._should_close = True
128
+ self._parser = None
129
+ self._payload = None
130
+ self._payload_parser = None
131
+ self._reading_paused = False
132
+
133
+ super().connection_lost(reraised_exc)
134
+
135
+ def eof_received(self) -> None:
136
+ # should call parser.feed_eof() most likely
137
+ self._drop_timeout()
138
+
139
+ def pause_reading(self) -> None:
140
+ super().pause_reading()
141
+ self._drop_timeout()
142
+
143
+ def resume_reading(self) -> None:
144
+ super().resume_reading()
145
+ self._reschedule_timeout()
146
+
147
+ def set_exception(
148
+ self,
149
+ exc: BaseException,
150
+ exc_cause: BaseException = _EXC_SENTINEL,
151
+ ) -> None:
152
+ self._should_close = True
153
+ self._drop_timeout()
154
+ super().set_exception(exc, exc_cause)
155
+
156
    def set_parser(self, parser: Any, payload: Any) -> None:
        """Install a custom payload parser (used after a protocol upgrade).

        Any bytes buffered in ``self._tail`` are replayed through
        ``data_received`` so the new parser sees them immediately.
        """
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)
170
+
171
    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
        timeout_ceil_threshold: float = 5,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        """Create a fresh ``HttpResponseParser`` configured for one request.

        Bytes buffered in ``self._tail`` (received before the parser existed)
        are replayed through ``data_received`` immediately.
        """
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)
206
+
207
    def _drop_timeout(self) -> None:
        """Cancel the pending read-timeout callback, if one is scheduled."""
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None
211
+
212
    def _reschedule_timeout(self) -> None:
        """Restart the read timeout; a falsy ``_read_timeout`` disables it."""
        timeout = self._read_timeout
        # Cancel any previously scheduled callback before re-arming.
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None
223
+
224
    def start_timeout(self) -> None:
        """Arm the read timeout (invoked after the request body is written)."""
        self._reschedule_timeout()
226
+
227
    def _on_read_timeout(self) -> None:
        """Timeout callback: fail both this protocol and the active payload."""
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            set_exception(self._payload, exc)
232
+
233
    def data_received(self, data: bytes) -> None:
        """Dispatch incoming bytes.

        Routing, in order:
        1. an installed custom payload parser (post-upgrade) gets the data;
        2. if upgraded but no parser is set yet, bytes are buffered in
           ``self._tail``;
        3. otherwise the HTTP response parser consumes them and parsed
           (message, payload) pairs are fed into the data queue.
        """
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    # Re-dispatch leftover bytes now that the parser is gone.
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as underlying_exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(HttpProcessingError(), underlying_exc)
                    return

                self._upgraded = upgraded

                payload: Optional[StreamReader] = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    # Responses that must not carry a body get EMPTY_PAYLOAD.
                    if self._skip_payload or status_code_must_be_empty_body(
                        message.code
                    ):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        # Leftover belongs to the upgraded protocol stream.
                        self.data_received(tail)
                    else:
                        self._tail = tail
env-llmeval/lib/python3.10/site-packages/aiohttp/client_reqrep.py ADDED
@@ -0,0 +1,1207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import codecs
3
+ import contextlib
4
+ import functools
5
+ import io
6
+ import re
7
+ import sys
8
+ import traceback
9
+ import warnings
10
+ from hashlib import md5, sha1, sha256
11
+ from http.cookies import CookieError, Morsel, SimpleCookie
12
+ from types import MappingProxyType, TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Callable,
17
+ Dict,
18
+ Iterable,
19
+ List,
20
+ Mapping,
21
+ Optional,
22
+ Tuple,
23
+ Type,
24
+ Union,
25
+ cast,
26
+ )
27
+
28
+ import attr
29
+ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
30
+ from yarl import URL
31
+
32
+ from . import hdrs, helpers, http, multipart, payload
33
+ from .abc import AbstractStreamWriter
34
+ from .client_exceptions import (
35
+ ClientConnectionError,
36
+ ClientOSError,
37
+ ClientResponseError,
38
+ ContentTypeError,
39
+ InvalidURL,
40
+ ServerFingerprintMismatch,
41
+ )
42
+ from .compression_utils import HAS_BROTLI
43
+ from .formdata import FormData
44
+ from .helpers import (
45
+ BaseTimerContext,
46
+ BasicAuth,
47
+ HeadersMixin,
48
+ TimerNoop,
49
+ basicauth_from_netrc,
50
+ netrc_from_env,
51
+ noop,
52
+ reify,
53
+ set_exception,
54
+ set_result,
55
+ )
56
+ from .http import (
57
+ SERVER_SOFTWARE,
58
+ HttpVersion,
59
+ HttpVersion10,
60
+ HttpVersion11,
61
+ StreamWriter,
62
+ )
63
+ from .log import client_logger
64
+ from .streams import StreamReader
65
+ from .typedefs import (
66
+ DEFAULT_JSON_DECODER,
67
+ JSONDecoder,
68
+ LooseCookies,
69
+ LooseHeaders,
70
+ RawHeaders,
71
+ )
72
+
73
+ try:
74
+ import ssl
75
+ from ssl import SSLContext
76
+ except ImportError: # pragma: no cover
77
+ ssl = None # type: ignore[assignment]
78
+ SSLContext = object # type: ignore[misc,assignment]
79
+
80
+
81
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")


if TYPE_CHECKING:
    # Imported only for annotations to avoid import cycles at runtime.
    from .client import ClientSession
    from .connector import Connection
    from .tracing import Trace


# Negated RFC 7230 token character class: matches any character that is
# NOT allowed in an HTTP method name.
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
# Matches "application/json" and structured suffixes like
# "application/vnd.api+json".
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
92
+
93
+
94
def _gen_default_accept_encoding() -> str:
    """Build the default ``Accept-Encoding`` header value.

    Brotli ("br") is advertised only when the optional brotli codec is
    importable.
    """
    codecs = ["gzip", "deflate"]
    if HAS_BROTLI:
        codecs.append("br")
    return ", ".join(codecs)
96
+
97
+
98
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Immutable parsed ``Content-Disposition`` value.

    ``type`` is the disposition type, ``parameters`` the read-only
    parameter mapping, ``filename`` the extracted filename (if any).
    """

    type: Optional[str]
    parameters: "MappingProxyType[str, str]"
    filename: Optional[str]
103
+
104
+
105
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    """Immutable description of an outgoing request (url, method, headers)."""

    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    real_url: URL = attr.ib()

    @real_url.default
    def real_url_default(self) -> URL:
        # Falls back to ``url`` when no distinct real_url is supplied.
        return self.url
115
+
116
+
117
class Fingerprint:
    """Pin a server TLS certificate by the digest of its DER encoding.

    The hash function is inferred from the digest length; md5 and sha1
    digests are rejected at construction time as insecure.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        if hashfunc is md5 or hashfunc is sha1:
            raise ValueError(
                "md5 and sha1 are insecure and " "not supported. Use sha256."
            )
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The pinned certificate digest as raw bytes."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Compare the peer certificate of *transport* against the pin.

        Plain (non-TLS) transports are ignored; a mismatch raises
        ``ServerFingerprintMismatch``.
        """
        if not transport.get_extra_info("sslcontext"):
            return
        sslobj = transport.get_extra_info("ssl_object")
        peer_cert = sslobj.getpeercert(binary_form=True)
        actual = self._hashfunc(peer_cert).digest()
        if actual != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, actual, host, port)
149
+
150
+
151
# Types accepted for the ``ssl`` request parameter; SSLContext is only
# available when the ssl module imported successfully above.
if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = (bool, type(None))
155
+
156
+
157
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    """Collapse legacy TLS arguments into the single ``ssl`` value.

    Each of ``verify_ssl``, ``ssl_context`` and ``fingerprint`` is
    deprecated; supplying one emits a ``DeprecationWarning`` and is only
    honoured when ``ssl`` was left at its default (``True``) — otherwise
    the combination is rejected as mutually exclusive.

    Returns the effective ssl setting: an ``SSLContext``, ``bool`` or
    ``Fingerprint``.  Raises ``ValueError`` on conflicting arguments and
    ``TypeError`` on an unsupported ``ssl`` type.
    """
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
210
+
211
+
212
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
    """Hashable key identifying a reusable connection in the pool."""

    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
223
+
224
+
225
def _is_expected_content_type(
    response_content_type: str, expected_content_type: str
) -> bool:
    """Return True when the response Content-Type satisfies the expectation.

    The JSON expectation is matched structurally (covering
    ``application/*+json`` variants); any other expectation is a plain
    substring test.
    """
    if expected_content_type != "application/json":
        return expected_content_type in response_content_type
    return json_re.match(response_content_type) is not None
231
+
232
+
233
class ClientRequest:
    """A single outgoing HTTP request.

    Builds the request line, headers, cookies, auth and body from the
    constructor arguments, then writes itself to an established connection
    via :meth:`send`, returning the configured response class instance.
    """

    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
    }

    body = b""
    auth = None
    response = None

    __writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Iterable[str] = frozenset(),
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
        trust_env: bool = False,
        server_hostname: Optional[str] = None,
    ):
        if loop is None:
            loop = asyncio.get_event_loop()

        match = _CONTAINS_CONTROL_CHAR_RE.search(method)
        if match:
            # BUGFIX: the second literal was missing its f-prefix, so the
            # message showed the literal text "{match.group()!r}" instead of
            # the offending character.
            raise ValueError(
                f"Method cannot contain non-token characters {method!r} "
                f"(found at least {match.group()!r})"
            )

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast("ClientSession", session)
        if params:
            # Merge explicit params with any query already present in url.
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class: Type[ClientResponse] = real_response_class
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl if ssl is not None else True
        self.server_hostname = server_hostname

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth, trust_env)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces

    def __reset_writer(self, _: object = None) -> None:
        # Done-callback: forget the finished writer task.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Keep exactly one done-callback registered on the current task.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is not None:
            writer.add_done_callback(self.__reset_writer)

    def is_ssl(self) -> bool:
        """Return True if the request URL uses a TLS scheme."""
        return self.url.scheme in ("https", "wss")

    @property
    def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        """Pooling key capturing host/port/TLS/proxy parameters."""
        proxy_headers = self.proxy_headers
        if proxy_headers:
            h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
        else:
            h = None
        return ConnectionKey(
            self.host,
            self.port,
            self.is_ssl(),
            self.ssl,
            self.proxy,
            self.proxy_auth,
            h,
        )

    @property
    def host(self) -> str:
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        """Immutable snapshot of this request for tracing/error reporting."""
        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
        return RequestInfo(self.url, self.method, headers, self.original_url)

    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info
        username, password = url.user, url.password
        if username:
            self.auth = helpers.BasicAuth(username, password or "")

    def update_version(self, version: Union[http.HttpVersion, str]) -> None:
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [part.strip() for part in version.split(".", 1)]
            try:
                version = http.HttpVersion(int(v[0]), int(v[1]))
            except ValueError:
                raise ValueError(
                    f"Can not parse http version number: {version}"
                ) from None
        self.version = version

    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers; always sets Host from the URL first."""
        self.headers: CIMultiDict[str] = CIMultiDict()

        # add host
        netloc = cast(str, self.url.raw_host)
        if helpers.is_ipv6_address(netloc):
            netloc = f"[{netloc}]"
        # See https://github.com/aio-libs/aiohttp/issues/3636.
        netloc = netloc.rstrip(".")
        if self.url.port is not None and not self.url.is_default_port():
            netloc += ":" + str(self.url.port)
        self.headers[hdrs.HOST] = netloc

        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()  # type: ignore[assignment]

            for key, value in headers:  # type: ignore[misc]
                # A special case for Host header
                if key.lower() == "host":
                    self.headers[key] = value
                else:
                    self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
        """Fill in default headers (Accept, Accept-Encoding, User-Agent)
        unless already set by the caller or explicitly skipped.
        """
        self.skip_auto_headers = CIMultiDict(
            (hdr, None) for hdr in sorted(skip_auto_headers)
        )
        used_headers = self.headers.copy()
        used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE

    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        if hdrs.COOKIE in self.headers:
            # Merge with cookies already present on the request.
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if data is None:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
        if enc:
            if self.compress:
                raise ValueError(
                    "compress can not be set " "if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set " "if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth from the argument, the URL userinfo, or netrc."""
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* in a Payload and derive length/chunking and headers."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked:
            if hdrs.CONTENT_LENGTH not in self.headers:
                size = body.size
                if size is None:
                    # Unknown length forces chunked transfer.
                    self.chunked = True
                else:
                    if hdrs.CONTENT_LENGTH not in self.headers:
                        self.headers[hdrs.CONTENT_LENGTH] = str(size)

        # copy payload headers
        assert body.headers
        for (key, value) in body.headers.items():
            if key in self.headers:
                continue
            if key in self.skip_auto_headers:
                continue
            self.headers[key] = value

    def update_expect_continue(self, expect: bool = False) -> None:
        """Arm the 100-continue waiter if requested or header-implied."""
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
            expect = True

        if expect:
            self._continue = self.loop.create_future()

    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        """Store proxy settings after validating the auth type."""
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth
        self.proxy_headers = proxy_headers

    def keep_alive(self) -> bool:
        """Return True if the connection may be reused after this request."""
        if self.version < HttpVersion10:
            # keep alive not supported at all
            return False
        if self.version == HttpVersion10:
            if self.headers.get(hdrs.CONNECTION) == "keep-alive":
                return True
            else:  # no headers means we close for Http 1.0
                return False
        elif self.headers.get(hdrs.CONNECTION) == "close":
            return False

        return True

    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            try:
                await writer.drain()
                await self._continue
            except asyncio.CancelledError:
                return

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)  # type: ignore[assignment]

                for chunk in self.body:
                    await writer.write(chunk)  # type: ignore[arg-type]
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            # asyncio.TimeoutError subclasses OSError on some versions;
            # only wrap genuine socket errors in ClientOSError.
            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            await writer.write_eof()
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            await writer.write_eof()
            protocol.start_timeout()

    async def send(self, conn: "Connection") -> "ClientResponse":
        """Write the request to *conn* and return the pending response."""
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.raw_host
            assert connect_host is not None
            if helpers.is_ipv6_address(connect_host):
                connect_host = f"[{connect_host}]"
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path
            if self.url.raw_query_string:
                path += "?" + self.url.raw_query_string

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=functools.partial(
                self._on_chunk_request_sent, self.method, self.url
            ),
            on_headers_sent=functools.partial(
                self._on_headers_request_sent, self.method, self.url
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and hdrs.CONTENT_TYPE not in self.skip_auto_headers
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        # set the connection header
        connection = self.headers.get(hdrs.CONNECTION)
        if not connection:
            if self.keep_alive():
                if self.version == HttpVersion10:
                    connection = "keep-alive"
            else:
                if self.version == HttpVersion11:
                    connection = "close"

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

        # status + headers
        status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format(
            self.method, path, v=self.version
        )
        await writer.write_headers(status_line, self.headers)

        self._writer = self.loop.create_task(self.write_bytes(writer, conn))

        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=self._writer,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response

    async def close(self) -> None:
        """Wait for the body writer task to finish (ignoring cancellation)."""
        if self._writer is not None:
            with contextlib.suppress(asyncio.CancelledError):
                await self._writer

    def terminate(self) -> None:
        """Cancel the body writer task, if the loop is still running."""
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer.remove_done_callback(self.__reset_writer)
            self._writer = None

    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Tracing hook: fired for every body chunk written.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Tracing hook: fired once the header block has been written.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
763
+
764
+
765
class ClientResponse(HeadersMixin):
    """HTTP response object returned by client requests.

    The instance is created in a "pending" state; :meth:`start` fills in the
    status line, headers and payload stream once they have been read from the
    connection.  Supports use as an async context manager, releasing the
    underlying connection on exit.
    """

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase

    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _raw_headers: RawHeaders = None  # type: ignore[assignment]

    _connection = None  # current connection
    _source_traceback: Optional[traceback.StackSummary] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    __writer = None  # request-body writer task; managed via the _writer property

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "asyncio.Task[None]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        assert isinstance(url, URL)

        self.method = method
        self.cookies = SimpleCookie()

        self._real_url = url
        # The fragment is client-side only and must not be sent to the server.
        self._url = url.with_fragment(None)
        self._body: Any = None
        self._writer: Optional[asyncio.Task[None]] = writer
        self._continue = continue100  # None by default
        self._closed = True
        self._history: Tuple[ClientResponse, ...] = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache: Dict[str, Any] = {}
        self._traces = traces
        self._loop = loop
        # store a reference to session #1985
        self._session: Optional[ClientSession] = session
        # Save reference to _resolve_charset, so that get_encoding() will still
        # work after the response has finished reading the body.
        if session is None:
            # TODO: Fix session=None in tests (see ClientRequest.__init__).
            self._resolve_charset: Callable[
                ["ClientResponse", bytes], str
            ] = lambda *_: "utf-8"
        else:
            self._resolve_charset = session._resolve_charset
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __reset_writer(self, _: object = None) -> None:
        # Done-callback: drop the reference so the finished task can be GC'd.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The request-body writer task, or ``None`` once it has completed."""
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Swap the done-callback from the old task to the new one so that
        # __writer is always cleared when the active task finishes.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is not None:
            writer.add_done_callback(self.__reset_writer)

    @reify
    def url(self) -> URL:
        """Request URL with the fragment stripped."""
        return self._url

    @reify
    def url_obj(self) -> URL:
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @reify
    def real_url(self) -> URL:
        """Original request URL, fragment included."""
        return self._real_url

    @reify
    def host(self) -> str:
        assert self._url.host is not None
        return self._url.host

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        return self._raw_headers

    @reify
    def request_info(self) -> RequestInfo:
        return self._request_info

    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        """Parsed ``Content-Disposition`` header, or ``None`` when absent."""
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings: Any = warnings) -> None:
        # _warnings is bound at definition time so the module is still
        # reachable during interpreter shutdown.
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

        if self._loop.get_debug():
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
            context = {"client_response": self, "message": "Unclosed response"}
            if self._source_traceback:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    def __repr__(self) -> str:
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode(
                "ascii", "backslashreplace"
            ).decode("ascii")
        else:
            ascii_encodable_reason = "None"
        print(
            "<ClientResponse({}) [{} {}]>".format(
                ascii_encodable_url, self.status, ascii_encodable_reason
            ),
            file=out,
        )
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self) -> Optional["Connection"]:
        return self._connection

    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history

    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        """Parsed ``Link`` header(s), keyed by ``rel`` (or the URL itself)."""
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

        # Split on commas that precede a "<", i.e. between link-values.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link: MultiDict[Union[str, URL]] = MultiDict()

            for param in params:
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)

            link.add("url", self.url.join(URL(url)))

            links.add(str(key), MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                # Loop past 1xx informational responses (except 101 Upgrade,
                # which terminates the exchange).
                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning("Can not load response cookies: %s", exc)
        return self

    def _response_eof(self) -> None:
        # Called when the payload stream hits EOF; finishes the response
        # unless the connection was upgraded (e.g. to WebSocket).
        if self._closed:
            return

        # protocol could be None because connection could be detached
        protocol = self._connection and self._connection.protocol
        if protocol is not None and protocol.upgraded:
            return

        self._closed = True
        self._cleanup_writer()
        self._release_connection()

    @property
    def closed(self) -> bool:
        return self._closed

    def close(self) -> None:
        """Close the response and hard-close the underlying connection."""
        if not self._released:
            self._notify_content()

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        self._cleanup_writer()
        if self._connection is not None:
            self._connection.close()
            self._connection = None

    def release(self) -> Any:
        """Release the connection back to the pool (soft close)."""
        if not self._released:
            self._notify_content()

        self._closed = True

        self._cleanup_writer()
        self._release_connection()
        return noop()

    @property
    def ok(self) -> bool:
        """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

        This is **not** a check for ``200 OK`` but a check that the response
        status is under 400.
        """
        return 400 > self.status

    def raise_for_status(self) -> None:
        """Raise :class:`ClientResponseError` for 4xx/5xx statuses."""
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None
            self.release()
            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )

    def _release_connection(self) -> None:
        # Only return the connection to the pool once the writer task is done;
        # otherwise defer via a done-callback.
        if self._connection is not None:
            if self._writer is None:
                self._connection.release()
                self._connection = None
            else:
                self._writer.add_done_callback(lambda f: self._release_connection())

    async def _wait_released(self) -> None:
        # Await writer completion, then release the connection.
        if self._writer is not None:
            await self._writer
        self._release_connection()

    def _cleanup_writer(self) -> None:
        # Cancel an in-flight request-body writer and drop the session ref.
        if self._writer is not None:
            self._writer.cancel()
        self._session = None

    def _notify_content(self) -> None:
        # Mark remaining payload as unreadable: the connection is going away.
        content = self.content
        if content and content.exception() is None:
            set_exception(content, ClientConnectionError("Connection closed"))
        self._released = True

    async def wait_for_close(self) -> None:
        if self._writer is not None:
            await self._writer
        self.release()

    async def read(self) -> bytes:
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                self.close()
                raise
        elif self._released:  # Response explicitly released
            raise ClientConnectionError("Connection closed")

        protocol = self._connection and self._connection.protocol
        if protocol is None or not protocol.upgraded:
            await self._wait_released()  # Underlying connection released
        return self._body  # type: ignore[no-any-return]

    def get_encoding(self) -> str:
        """Return the charset of the body, falling back to content sniffing."""
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            with contextlib.suppress(LookupError):
                return codecs.lookup(encoding).name

        if mimetype.type == "application" and (
            mimetype.subtype == "json" or mimetype.subtype == "rdap"
        ):
            # RFC 7159 states that the default encoding is UTF-8.
            # RFC 7483 defines application/rdap+json
            return "utf-8"

        if self._body is None:
            raise RuntimeError(
                "Cannot compute fallback encoding of a not yet read body"
            )

        return self._resolve_charset(self, self._body)

    async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(  # type: ignore[no-any-return,union-attr]
            encoding, errors=errors
        )

    async def json(
        self,
        *,
        encoding: Optional[str] = None,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Read and decodes JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=(
                        "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                    ),
                    headers=self.headers,
                )

        stripped = self._body.strip()  # type: ignore[union-attr]
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self) -> "ClientResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # similar to _RequestContextManager, we do not need to check
        # for exceptions, response object can close connection
        # if state is broken
        self.release()
        await self.wait_for_close()
env-llmeval/lib/python3.10/site-packages/aiohttp/client_ws.py ADDED
@@ -0,0 +1,315 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket client for asyncio."""
2
+
3
+ import asyncio
4
+ import sys
5
+ from typing import Any, Optional, cast
6
+
7
+ from .client_exceptions import ClientError
8
+ from .client_reqrep import ClientResponse
9
+ from .helpers import call_later, set_result
10
+ from .http import (
11
+ WS_CLOSED_MESSAGE,
12
+ WS_CLOSING_MESSAGE,
13
+ WebSocketError,
14
+ WSCloseCode,
15
+ WSMessage,
16
+ WSMsgType,
17
+ )
18
+ from .http_websocket import WebSocketWriter # WSMessage
19
+ from .streams import EofStream, FlowControlDataQueue
20
+ from .typedefs import (
21
+ DEFAULT_JSON_DECODER,
22
+ DEFAULT_JSON_ENCODER,
23
+ JSONDecoder,
24
+ JSONEncoder,
25
+ )
26
+
27
+ if sys.version_info >= (3, 11):
28
+ import asyncio as async_timeout
29
+ else:
30
+ import async_timeout
31
+
32
+
33
class ClientWebSocketResponse:
    """Client side of an established WebSocket connection.

    Wraps the reader/writer pair produced during the upgrade handshake and
    implements ping/pong heartbeats, autoclose handling, and the async
    iterator protocol over incoming messages.
    """

    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol  # negotiated subprotocol, if any
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout  # used while waiting for the close handshake
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            # A pong must arrive within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: Optional[asyncio.Future[bool]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        # Cancel both the ping timer and the pending pong deadline.
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        # Re-arm the ping timer; called after every received message.
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat,
                self._heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            # Start the deadline for the peer's pong reply.
            self._pong_response_cb = call_later(
                self._pong_not_received,
                self._pong_heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _pong_not_received(self) -> None:
        # Heartbeat deadline expired: treat the connection as dead.
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Perform the closing handshake; return True if we initiated it."""
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closing:
            self._closing = True
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._close_code:
                self._response.close()
                return True

            # Drain until the peer's CLOSE frame arrives (bounded by timeout).
            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, transparently handling ping/pong/close."""
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                        self._reset_heartbeat()
                finally:
                    # Wake up a concurrent close() waiting on us.
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
env-llmeval/lib/python3.10/site-packages/aiohttp/compression_utils.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import zlib
3
+ from concurrent.futures import Executor
4
+ from typing import Optional, cast
5
+
6
+ try:
7
+ try:
8
+ import brotlicffi as brotli
9
+ except ImportError:
10
+ import brotli
11
+
12
+ HAS_BROTLI = True
13
+ except ImportError: # pragma: no cover
14
+ HAS_BROTLI = False
15
+
16
+ MAX_SYNC_CHUNK_SIZE = 1024
17
+
18
+
19
def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    """Translate a content-encoding name into a zlib ``wbits`` value.

    ``"gzip"`` selects gzip framing (``16 + MAX_WBITS``); any other value
    selects zlib framing, or raw deflate (negative ``wbits``) when
    *suppress_deflate_header* is true.
    """
    if encoding == "gzip":
        return 16 + zlib.MAX_WBITS
    if suppress_deflate_header:
        return -zlib.MAX_WBITS
    return zlib.MAX_WBITS
27
+
28
+
29
class ZlibBaseHandler:
    """Shared configuration for the zlib compressor/decompressor wrappers."""

    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # zlib ``wbits`` value (see encoding_to_mode).
        self._mode = mode
        # Executor used to offload work for large payloads; ``None`` means
        # the event loop's default executor.
        self._executor = executor
        # Payloads larger than this run in the executor instead of blocking
        # the event loop; ``None`` disables offloading.
        self._max_sync_chunk_size = max_sync_chunk_size
39
+
40
+
41
class ZLibCompressor(ZlibBaseHandler):
    """Event-loop friendly wrapper around ``zlib.compressobj``."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # An explicit wbits overrides whatever the encoding name implies.
        if wbits is None:
            mode = encoding_to_mode(encoding, suppress_deflate_header)
        else:
            mode = wbits
        super().__init__(
            mode=mode,
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        if level is None:
            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
        else:
            self._compressor = zlib.compressobj(
                wbits=self._mode, strategy=strategy, level=level
            )
        # Serializes writers so interleaved chunks cannot corrupt the stream.
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        """Compress *data* synchronously on the calling thread."""
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress *data*, offloading large chunks to the executor.

        Holding the lock guarantees that when multiple coroutines write to
        the same stream, chunks enter the compressor one at a time.
        """
        async with self._compress_lock:
            should_offload = (
                self._max_sync_chunk_size is not None
                and len(data) > self._max_sync_chunk_size
            )
            if should_offload:
                return await asyncio.get_event_loop().run_in_executor(
                    self._executor, self.compress_sync, data
                )
            return self.compress_sync(data)

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        """Flush buffered output; *mode* is passed through to zlib."""
        return self._compressor.flush(mode)
87
+
88
+
89
class ZLibDecompressor(ZlibBaseHandler):
    """Event-loop friendly wrapper around ``zlib.decompressobj``."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data* synchronously on the calling thread."""
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data*, offloading large chunks to the executor."""
        should_offload = (
            self._max_sync_chunk_size is not None
            and len(data) > self._max_sync_chunk_size
        )
        if should_offload:
            return await asyncio.get_event_loop().run_in_executor(
                self._executor, self.decompress_sync, data, max_length
            )
        return self.decompress_sync(data, max_length)

    def flush(self, length: int = 0) -> bytes:
        """Flush remaining buffered output; *length* caps the result size."""
        if length > 0:
            return self._decompressor.flush(length)
        return self._decompressor.flush()

    @property
    def eof(self) -> bool:
        """True once the end of the compressed stream has been reached."""
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        """Bytes left unconsumed because of a ``max_length`` limit."""
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        """Bytes found after the end of the compressed stream."""
        return self._decompressor.unused_data
135
+
136
+
137
class BrotliDecompressor:
    """Incremental brotli decompressor.

    Works with both the 'brotlipy' and 'Brotli' packages, which share the
    ``brotli`` import name but expose slightly different decompressor APIs.
    """

    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        # 'brotlipy' exposes .decompress(); 'Brotli' exposes .process().
        decompress = getattr(self._obj, "decompress", None)
        if decompress is not None:
            return cast(bytes, decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        # Only 'brotlipy' buffers output that needs flushing.
        flush = getattr(self._obj, "flush", None)
        if flush is not None:
            return cast(bytes, flush())
        return b""
env-llmeval/lib/python3.10/site-packages/aiohttp/connector.py ADDED
@@ -0,0 +1,1511 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import functools
3
+ import random
4
+ import sys
5
+ import traceback
6
+ import warnings
7
+ from collections import defaultdict, deque
8
+ from contextlib import suppress
9
+ from http import HTTPStatus
10
+ from http.cookies import SimpleCookie
11
+ from itertools import cycle, islice
12
+ from time import monotonic
13
+ from types import TracebackType
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ Awaitable,
18
+ Callable,
19
+ DefaultDict,
20
+ Dict,
21
+ Iterator,
22
+ List,
23
+ Literal,
24
+ Optional,
25
+ Set,
26
+ Tuple,
27
+ Type,
28
+ Union,
29
+ cast,
30
+ )
31
+
32
+ import attr
33
+
34
+ from . import hdrs, helpers
35
+ from .abc import AbstractResolver
36
+ from .client_exceptions import (
37
+ ClientConnectionError,
38
+ ClientConnectorCertificateError,
39
+ ClientConnectorError,
40
+ ClientConnectorSSLError,
41
+ ClientHttpProxyError,
42
+ ClientProxyConnectionError,
43
+ ServerFingerprintMismatch,
44
+ UnixClientConnectorError,
45
+ cert_errors,
46
+ ssl_errors,
47
+ )
48
+ from .client_proto import ResponseHandler
49
+ from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
50
+ from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel
51
+ from .locks import EventResultOrError
52
+ from .resolver import DefaultResolver
53
+
54
+ try:
55
+ import ssl
56
+
57
+ SSLContext = ssl.SSLContext
58
+ except ImportError: # pragma: no cover
59
+ ssl = None # type: ignore[assignment]
60
+ SSLContext = object # type: ignore[misc,assignment]
61
+
62
+
63
+ __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
64
+
65
+
66
+ if TYPE_CHECKING:
67
+ from .client import ClientTimeout
68
+ from .client_reqrep import ConnectionKey
69
+ from .tracing import Trace
70
+
71
+
72
+ class _DeprecationWaiter:
73
+ __slots__ = ("_awaitable", "_awaited")
74
+
75
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
76
+ self._awaitable = awaitable
77
+ self._awaited = False
78
+
79
+ def __await__(self) -> Any:
80
+ self._awaited = True
81
+ return self._awaitable.__await__()
82
+
83
+ def __del__(self) -> None:
84
+ if not self._awaited:
85
+ warnings.warn(
86
+ "Connector.close() is a coroutine, "
87
+ "please use await connector.close()",
88
+ DeprecationWarning,
89
+ )
90
+
91
+
92
+ class Connection:
93
+
94
+ _source_traceback = None
95
+ _transport = None
96
+
97
+ def __init__(
98
+ self,
99
+ connector: "BaseConnector",
100
+ key: "ConnectionKey",
101
+ protocol: ResponseHandler,
102
+ loop: asyncio.AbstractEventLoop,
103
+ ) -> None:
104
+ self._key = key
105
+ self._connector = connector
106
+ self._loop = loop
107
+ self._protocol: Optional[ResponseHandler] = protocol
108
+ self._callbacks: List[Callable[[], None]] = []
109
+
110
+ if loop.get_debug():
111
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
112
+
113
+ def __repr__(self) -> str:
114
+ return f"Connection<{self._key}>"
115
+
116
+ def __del__(self, _warnings: Any = warnings) -> None:
117
+ if self._protocol is not None:
118
+ kwargs = {"source": self}
119
+ _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
120
+ if self._loop.is_closed():
121
+ return
122
+
123
+ self._connector._release(self._key, self._protocol, should_close=True)
124
+
125
+ context = {"client_connection": self, "message": "Unclosed connection"}
126
+ if self._source_traceback is not None:
127
+ context["source_traceback"] = self._source_traceback
128
+ self._loop.call_exception_handler(context)
129
+
130
+ def __bool__(self) -> Literal[True]:
131
+ """Force subclasses to not be falsy, to make checks simpler."""
132
+ return True
133
+
134
+ @property
135
+ def loop(self) -> asyncio.AbstractEventLoop:
136
+ warnings.warn(
137
+ "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
138
+ )
139
+ return self._loop
140
+
141
+ @property
142
+ def transport(self) -> Optional[asyncio.Transport]:
143
+ if self._protocol is None:
144
+ return None
145
+ return self._protocol.transport
146
+
147
+ @property
148
+ def protocol(self) -> Optional[ResponseHandler]:
149
+ return self._protocol
150
+
151
+ def add_callback(self, callback: Callable[[], None]) -> None:
152
+ if callback is not None:
153
+ self._callbacks.append(callback)
154
+
155
+ def _notify_release(self) -> None:
156
+ callbacks, self._callbacks = self._callbacks[:], []
157
+
158
+ for cb in callbacks:
159
+ with suppress(Exception):
160
+ cb()
161
+
162
+ def close(self) -> None:
163
+ self._notify_release()
164
+
165
+ if self._protocol is not None:
166
+ self._connector._release(self._key, self._protocol, should_close=True)
167
+ self._protocol = None
168
+
169
+ def release(self) -> None:
170
+ self._notify_release()
171
+
172
+ if self._protocol is not None:
173
+ self._connector._release(
174
+ self._key, self._protocol, should_close=self._protocol.should_close
175
+ )
176
+ self._protocol = None
177
+
178
+ @property
179
+ def closed(self) -> bool:
180
+ return self._protocol is None or not self._protocol.is_connected()
181
+
182
+
183
+ class _TransportPlaceholder:
184
+ """placeholder for BaseConnector.connect function"""
185
+
186
+ def close(self) -> None:
187
+ pass
188
+
189
+
190
+ class BaseConnector:
191
+ """Base connector class.
192
+
193
+ keepalive_timeout - (optional) Keep-alive timeout.
194
+ force_close - Set to True to force close and do reconnect
195
+ after each request (and between redirects).
196
+ limit - The total number of simultaneous connections.
197
+ limit_per_host - Number of simultaneous connections to one host.
198
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
199
+ Disabled by default.
200
+ timeout_ceil_threshold - Trigger ceiling of timeout values when
201
+ it's above timeout_ceil_threshold.
202
+ loop - Optional event loop.
203
+ """
204
+
205
+ _closed = True # prevent AttributeError in __del__ if ctor was failed
206
+ _source_traceback = None
207
+
208
+ # abort transport after 2 seconds (cleanup broken connections)
209
+ _cleanup_closed_period = 2.0
210
+
211
+ def __init__(
212
+ self,
213
+ *,
214
+ keepalive_timeout: Union[object, None, float] = sentinel,
215
+ force_close: bool = False,
216
+ limit: int = 100,
217
+ limit_per_host: int = 0,
218
+ enable_cleanup_closed: bool = False,
219
+ loop: Optional[asyncio.AbstractEventLoop] = None,
220
+ timeout_ceil_threshold: float = 5,
221
+ ) -> None:
222
+
223
+ if force_close:
224
+ if keepalive_timeout is not None and keepalive_timeout is not sentinel:
225
+ raise ValueError(
226
+ "keepalive_timeout cannot " "be set if force_close is True"
227
+ )
228
+ else:
229
+ if keepalive_timeout is sentinel:
230
+ keepalive_timeout = 15.0
231
+
232
+ loop = get_running_loop(loop)
233
+ self._timeout_ceil_threshold = timeout_ceil_threshold
234
+
235
+ self._closed = False
236
+ if loop.get_debug():
237
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
238
+
239
+ self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
240
+ self._limit = limit
241
+ self._limit_per_host = limit_per_host
242
+ self._acquired: Set[ResponseHandler] = set()
243
+ self._acquired_per_host: DefaultDict[
244
+ ConnectionKey, Set[ResponseHandler]
245
+ ] = defaultdict(set)
246
+ self._keepalive_timeout = cast(float, keepalive_timeout)
247
+ self._force_close = force_close
248
+
249
+ # {host_key: FIFO list of waiters}
250
+ self._waiters = defaultdict(deque) # type: ignore[var-annotated]
251
+
252
+ self._loop = loop
253
+ self._factory = functools.partial(ResponseHandler, loop=loop)
254
+
255
+ self.cookies = SimpleCookie()
256
+
257
+ # start keep-alive connection cleanup task
258
+ self._cleanup_handle: Optional[asyncio.TimerHandle] = None
259
+
260
+ # start cleanup closed transports task
261
+ self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
262
+ self._cleanup_closed_disabled = not enable_cleanup_closed
263
+ self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
264
+ self._cleanup_closed()
265
+
266
+ def __del__(self, _warnings: Any = warnings) -> None:
267
+ if self._closed:
268
+ return
269
+ if not self._conns:
270
+ return
271
+
272
+ conns = [repr(c) for c in self._conns.values()]
273
+
274
+ self._close()
275
+
276
+ kwargs = {"source": self}
277
+ _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
278
+ context = {
279
+ "connector": self,
280
+ "connections": conns,
281
+ "message": "Unclosed connector",
282
+ }
283
+ if self._source_traceback is not None:
284
+ context["source_traceback"] = self._source_traceback
285
+ self._loop.call_exception_handler(context)
286
+
287
+ def __enter__(self) -> "BaseConnector":
288
+ warnings.warn(
289
+ '"with Connector():" is deprecated, '
290
+ 'use "async with Connector():" instead',
291
+ DeprecationWarning,
292
+ )
293
+ return self
294
+
295
+ def __exit__(self, *exc: Any) -> None:
296
+ self._close()
297
+
298
+ async def __aenter__(self) -> "BaseConnector":
299
+ return self
300
+
301
+ async def __aexit__(
302
+ self,
303
+ exc_type: Optional[Type[BaseException]] = None,
304
+ exc_value: Optional[BaseException] = None,
305
+ exc_traceback: Optional[TracebackType] = None,
306
+ ) -> None:
307
+ await self.close()
308
+
309
+ @property
310
+ def force_close(self) -> bool:
311
+ """Ultimately close connection on releasing if True."""
312
+ return self._force_close
313
+
314
+ @property
315
+ def limit(self) -> int:
316
+ """The total number for simultaneous connections.
317
+
318
+ If limit is 0 the connector has no limit.
319
+ The default limit size is 100.
320
+ """
321
+ return self._limit
322
+
323
+ @property
324
+ def limit_per_host(self) -> int:
325
+ """The limit for simultaneous connections to the same endpoint.
326
+
327
+ Endpoints are the same if they are have equal
328
+ (host, port, is_ssl) triple.
329
+ """
330
+ return self._limit_per_host
331
+
332
+ def _cleanup(self) -> None:
333
+ """Cleanup unused transports."""
334
+ if self._cleanup_handle:
335
+ self._cleanup_handle.cancel()
336
+ # _cleanup_handle should be unset, otherwise _release() will not
337
+ # recreate it ever!
338
+ self._cleanup_handle = None
339
+
340
+ now = self._loop.time()
341
+ timeout = self._keepalive_timeout
342
+
343
+ if self._conns:
344
+ connections = {}
345
+ deadline = now - timeout
346
+ for key, conns in self._conns.items():
347
+ alive = []
348
+ for proto, use_time in conns:
349
+ if proto.is_connected():
350
+ if use_time - deadline < 0:
351
+ transport = proto.transport
352
+ proto.close()
353
+ if key.is_ssl and not self._cleanup_closed_disabled:
354
+ self._cleanup_closed_transports.append(transport)
355
+ else:
356
+ alive.append((proto, use_time))
357
+ else:
358
+ transport = proto.transport
359
+ proto.close()
360
+ if key.is_ssl and not self._cleanup_closed_disabled:
361
+ self._cleanup_closed_transports.append(transport)
362
+
363
+ if alive:
364
+ connections[key] = alive
365
+
366
+ self._conns = connections
367
+
368
+ if self._conns:
369
+ self._cleanup_handle = helpers.weakref_handle(
370
+ self,
371
+ "_cleanup",
372
+ timeout,
373
+ self._loop,
374
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
375
+ )
376
+
377
+ def _drop_acquired_per_host(
378
+ self, key: "ConnectionKey", val: ResponseHandler
379
+ ) -> None:
380
+ acquired_per_host = self._acquired_per_host
381
+ if key not in acquired_per_host:
382
+ return
383
+ conns = acquired_per_host[key]
384
+ conns.remove(val)
385
+ if not conns:
386
+ del self._acquired_per_host[key]
387
+
388
+ def _cleanup_closed(self) -> None:
389
+ """Double confirmation for transport close.
390
+
391
+ Some broken ssl servers may leave socket open without proper close.
392
+ """
393
+ if self._cleanup_closed_handle:
394
+ self._cleanup_closed_handle.cancel()
395
+
396
+ for transport in self._cleanup_closed_transports:
397
+ if transport is not None:
398
+ transport.abort()
399
+
400
+ self._cleanup_closed_transports = []
401
+
402
+ if not self._cleanup_closed_disabled:
403
+ self._cleanup_closed_handle = helpers.weakref_handle(
404
+ self,
405
+ "_cleanup_closed",
406
+ self._cleanup_closed_period,
407
+ self._loop,
408
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
409
+ )
410
+
411
+ def close(self) -> Awaitable[None]:
412
+ """Close all opened transports."""
413
+ self._close()
414
+ return _DeprecationWaiter(noop())
415
+
416
+ def _close(self) -> None:
417
+ if self._closed:
418
+ return
419
+
420
+ self._closed = True
421
+
422
+ try:
423
+ if self._loop.is_closed():
424
+ return
425
+
426
+ # cancel cleanup task
427
+ if self._cleanup_handle:
428
+ self._cleanup_handle.cancel()
429
+
430
+ # cancel cleanup close task
431
+ if self._cleanup_closed_handle:
432
+ self._cleanup_closed_handle.cancel()
433
+
434
+ for data in self._conns.values():
435
+ for proto, t0 in data:
436
+ proto.close()
437
+
438
+ for proto in self._acquired:
439
+ proto.close()
440
+
441
+ for transport in self._cleanup_closed_transports:
442
+ if transport is not None:
443
+ transport.abort()
444
+
445
+ finally:
446
+ self._conns.clear()
447
+ self._acquired.clear()
448
+ self._waiters.clear()
449
+ self._cleanup_handle = None
450
+ self._cleanup_closed_transports.clear()
451
+ self._cleanup_closed_handle = None
452
+
453
+ @property
454
+ def closed(self) -> bool:
455
+ """Is connector closed.
456
+
457
+ A readonly property.
458
+ """
459
+ return self._closed
460
+
461
+ def _available_connections(self, key: "ConnectionKey") -> int:
462
+ """
463
+ Return number of available connections.
464
+
465
+ The limit, limit_per_host and the connection key are taken into account.
466
+
467
+ If it returns less than 1 means that there are no connections
468
+ available.
469
+ """
470
+ if self._limit:
471
+ # total calc available connections
472
+ available = self._limit - len(self._acquired)
473
+
474
+ # check limit per host
475
+ if (
476
+ self._limit_per_host
477
+ and available > 0
478
+ and key in self._acquired_per_host
479
+ ):
480
+ acquired = self._acquired_per_host.get(key)
481
+ assert acquired is not None
482
+ available = self._limit_per_host - len(acquired)
483
+
484
+ elif self._limit_per_host and key in self._acquired_per_host:
485
+ # check limit per host
486
+ acquired = self._acquired_per_host.get(key)
487
+ assert acquired is not None
488
+ available = self._limit_per_host - len(acquired)
489
+ else:
490
+ available = 1
491
+
492
+ return available
493
+
494
+ async def connect(
495
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
496
+ ) -> Connection:
497
+ """Get from pool or create new connection."""
498
+ key = req.connection_key
499
+ available = self._available_connections(key)
500
+
501
+ # Wait if there are no available connections or if there are/were
502
+ # waiters (i.e. don't steal connection from a waiter about to wake up)
503
+ if available <= 0 or key in self._waiters:
504
+ fut = self._loop.create_future()
505
+
506
+ # This connection will now count towards the limit.
507
+ self._waiters[key].append(fut)
508
+
509
+ if traces:
510
+ for trace in traces:
511
+ await trace.send_connection_queued_start()
512
+
513
+ try:
514
+ await fut
515
+ except BaseException as e:
516
+ if key in self._waiters:
517
+ # remove a waiter even if it was cancelled, normally it's
518
+ # removed when it's notified
519
+ try:
520
+ self._waiters[key].remove(fut)
521
+ except ValueError: # fut may no longer be in list
522
+ pass
523
+
524
+ raise e
525
+ finally:
526
+ if key in self._waiters and not self._waiters[key]:
527
+ del self._waiters[key]
528
+
529
+ if traces:
530
+ for trace in traces:
531
+ await trace.send_connection_queued_end()
532
+
533
+ proto = self._get(key)
534
+ if proto is None:
535
+ placeholder = cast(ResponseHandler, _TransportPlaceholder())
536
+ self._acquired.add(placeholder)
537
+ self._acquired_per_host[key].add(placeholder)
538
+
539
+ if traces:
540
+ for trace in traces:
541
+ await trace.send_connection_create_start()
542
+
543
+ try:
544
+ proto = await self._create_connection(req, traces, timeout)
545
+ if self._closed:
546
+ proto.close()
547
+ raise ClientConnectionError("Connector is closed.")
548
+ except BaseException:
549
+ if not self._closed:
550
+ self._acquired.remove(placeholder)
551
+ self._drop_acquired_per_host(key, placeholder)
552
+ self._release_waiter()
553
+ raise
554
+ else:
555
+ if not self._closed:
556
+ self._acquired.remove(placeholder)
557
+ self._drop_acquired_per_host(key, placeholder)
558
+
559
+ if traces:
560
+ for trace in traces:
561
+ await trace.send_connection_create_end()
562
+ else:
563
+ if traces:
564
+ # Acquire the connection to prevent race conditions with limits
565
+ placeholder = cast(ResponseHandler, _TransportPlaceholder())
566
+ self._acquired.add(placeholder)
567
+ self._acquired_per_host[key].add(placeholder)
568
+ for trace in traces:
569
+ await trace.send_connection_reuseconn()
570
+ self._acquired.remove(placeholder)
571
+ self._drop_acquired_per_host(key, placeholder)
572
+
573
+ self._acquired.add(proto)
574
+ self._acquired_per_host[key].add(proto)
575
+ return Connection(self, key, proto, self._loop)
576
+
577
+ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
578
+ try:
579
+ conns = self._conns[key]
580
+ except KeyError:
581
+ return None
582
+
583
+ t1 = self._loop.time()
584
+ while conns:
585
+ proto, t0 = conns.pop()
586
+ if proto.is_connected():
587
+ if t1 - t0 > self._keepalive_timeout:
588
+ transport = proto.transport
589
+ proto.close()
590
+ # only for SSL transports
591
+ if key.is_ssl and not self._cleanup_closed_disabled:
592
+ self._cleanup_closed_transports.append(transport)
593
+ else:
594
+ if not conns:
595
+ # The very last connection was reclaimed: drop the key
596
+ del self._conns[key]
597
+ return proto
598
+ else:
599
+ transport = proto.transport
600
+ proto.close()
601
+ if key.is_ssl and not self._cleanup_closed_disabled:
602
+ self._cleanup_closed_transports.append(transport)
603
+
604
+ # No more connections: drop the key
605
+ del self._conns[key]
606
+ return None
607
+
608
+ def _release_waiter(self) -> None:
609
+ """
610
+ Iterates over all waiters until one to be released is found.
611
+
612
+ The one to be released is not finished and
613
+ belongs to a host that has available connections.
614
+ """
615
+ if not self._waiters:
616
+ return
617
+
618
+ # Having the dict keys ordered this avoids to iterate
619
+ # at the same order at each call.
620
+ queues = list(self._waiters.keys())
621
+ random.shuffle(queues)
622
+
623
+ for key in queues:
624
+ if self._available_connections(key) < 1:
625
+ continue
626
+
627
+ waiters = self._waiters[key]
628
+ while waiters:
629
+ waiter = waiters.popleft()
630
+ if not waiter.done():
631
+ waiter.set_result(None)
632
+ return
633
+
634
+ def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
635
+ if self._closed:
636
+ # acquired connection is already released on connector closing
637
+ return
638
+
639
+ try:
640
+ self._acquired.remove(proto)
641
+ self._drop_acquired_per_host(key, proto)
642
+ except KeyError: # pragma: no cover
643
+ # this may be result of undetermenistic order of objects
644
+ # finalization due garbage collection.
645
+ pass
646
+ else:
647
+ self._release_waiter()
648
+
649
+ def _release(
650
+ self,
651
+ key: "ConnectionKey",
652
+ protocol: ResponseHandler,
653
+ *,
654
+ should_close: bool = False,
655
+ ) -> None:
656
+ if self._closed:
657
+ # acquired connection is already released on connector closing
658
+ return
659
+
660
+ self._release_acquired(key, protocol)
661
+
662
+ if self._force_close:
663
+ should_close = True
664
+
665
+ if should_close or protocol.should_close:
666
+ transport = protocol.transport
667
+ protocol.close()
668
+
669
+ if key.is_ssl and not self._cleanup_closed_disabled:
670
+ self._cleanup_closed_transports.append(transport)
671
+ else:
672
+ conns = self._conns.get(key)
673
+ if conns is None:
674
+ conns = self._conns[key] = []
675
+ conns.append((protocol, self._loop.time()))
676
+
677
+ if self._cleanup_handle is None:
678
+ self._cleanup_handle = helpers.weakref_handle(
679
+ self,
680
+ "_cleanup",
681
+ self._keepalive_timeout,
682
+ self._loop,
683
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
684
+ )
685
+
686
+ async def _create_connection(
687
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
688
+ ) -> ResponseHandler:
689
+ raise NotImplementedError()
690
+
691
+
692
+ class _DNSCacheTable:
693
+ def __init__(self, ttl: Optional[float] = None) -> None:
694
+ self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
695
+ self._timestamps: Dict[Tuple[str, int], float] = {}
696
+ self._ttl = ttl
697
+
698
+ def __contains__(self, host: object) -> bool:
699
+ return host in self._addrs_rr
700
+
701
+ def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
702
+ self._addrs_rr[key] = (cycle(addrs), len(addrs))
703
+
704
+ if self._ttl is not None:
705
+ self._timestamps[key] = monotonic()
706
+
707
+ def remove(self, key: Tuple[str, int]) -> None:
708
+ self._addrs_rr.pop(key, None)
709
+
710
+ if self._ttl is not None:
711
+ self._timestamps.pop(key, None)
712
+
713
+ def clear(self) -> None:
714
+ self._addrs_rr.clear()
715
+ self._timestamps.clear()
716
+
717
+ def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
718
+ loop, length = self._addrs_rr[key]
719
+ addrs = list(islice(loop, length))
720
+ # Consume one more element to shift internal state of `cycle`
721
+ next(loop)
722
+ return addrs
723
+
724
+ def expired(self, key: Tuple[str, int]) -> bool:
725
+ if self._ttl is None:
726
+ return False
727
+
728
+ return self._timestamps[key] + self._ttl < monotonic()
729
+
730
+
731
+ class TCPConnector(BaseConnector):
732
+ """TCP connector.
733
+
734
+ verify_ssl - Set to True to check ssl certifications.
735
+ fingerprint - Pass the binary sha256
736
+ digest of the expected certificate in DER format to verify
737
+ that the certificate the server presents matches. See also
738
+ https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
739
+ resolver - Enable DNS lookups and use this
740
+ resolver
741
+ use_dns_cache - Use memory cache for DNS lookups.
742
+ ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
743
+ family - socket address family
744
+ local_addr - local tuple of (host, port) to bind socket to
745
+
746
+ keepalive_timeout - (optional) Keep-alive timeout.
747
+ force_close - Set to True to force close and do reconnect
748
+ after each request (and between redirects).
749
+ limit - The total number of simultaneous connections.
750
+ limit_per_host - Number of simultaneous connections to one host.
751
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
752
+ Disabled by default.
753
+ loop - Optional event loop.
754
+ """
755
+
756
+ def __init__(
757
+ self,
758
+ *,
759
+ verify_ssl: bool = True,
760
+ fingerprint: Optional[bytes] = None,
761
+ use_dns_cache: bool = True,
762
+ ttl_dns_cache: Optional[int] = 10,
763
+ family: int = 0,
764
+ ssl_context: Optional[SSLContext] = None,
765
+ ssl: Union[bool, Fingerprint, SSLContext] = True,
766
+ local_addr: Optional[Tuple[str, int]] = None,
767
+ resolver: Optional[AbstractResolver] = None,
768
+ keepalive_timeout: Union[None, float, object] = sentinel,
769
+ force_close: bool = False,
770
+ limit: int = 100,
771
+ limit_per_host: int = 0,
772
+ enable_cleanup_closed: bool = False,
773
+ loop: Optional[asyncio.AbstractEventLoop] = None,
774
+ timeout_ceil_threshold: float = 5,
775
+ ):
776
+ super().__init__(
777
+ keepalive_timeout=keepalive_timeout,
778
+ force_close=force_close,
779
+ limit=limit,
780
+ limit_per_host=limit_per_host,
781
+ enable_cleanup_closed=enable_cleanup_closed,
782
+ loop=loop,
783
+ timeout_ceil_threshold=timeout_ceil_threshold,
784
+ )
785
+
786
+ self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
787
+ if resolver is None:
788
+ resolver = DefaultResolver(loop=self._loop)
789
+ self._resolver = resolver
790
+
791
+ self._use_dns_cache = use_dns_cache
792
+ self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
793
+ self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
794
+ self._family = family
795
+ self._local_addr = local_addr
796
+
797
+ def close(self) -> Awaitable[None]:
798
+ """Close all ongoing DNS calls."""
799
+ for ev in self._throttle_dns_events.values():
800
+ ev.cancel()
801
+
802
+ return super().close()
803
+
804
+ @property
805
+ def family(self) -> int:
806
+ """Socket family like AF_INET."""
807
+ return self._family
808
+
809
+ @property
810
+ def use_dns_cache(self) -> bool:
811
+ """True if local DNS caching is enabled."""
812
+ return self._use_dns_cache
813
+
814
+ def clear_dns_cache(
815
+ self, host: Optional[str] = None, port: Optional[int] = None
816
+ ) -> None:
817
+ """Remove specified host/port or clear all dns local cache."""
818
+ if host is not None and port is not None:
819
+ self._cached_hosts.remove((host, port))
820
+ elif host is not None or port is not None:
821
+ raise ValueError("either both host and port " "or none of them are allowed")
822
+ else:
823
+ self._cached_hosts.clear()
824
+
825
+ async def _resolve_host(
826
+ self, host: str, port: int, traces: Optional[List["Trace"]] = None
827
+ ) -> List[Dict[str, Any]]:
828
+ """Resolve host and return list of addresses."""
829
+ if is_ip_address(host):
830
+ return [
831
+ {
832
+ "hostname": host,
833
+ "host": host,
834
+ "port": port,
835
+ "family": self._family,
836
+ "proto": 0,
837
+ "flags": 0,
838
+ }
839
+ ]
840
+
841
+ if not self._use_dns_cache:
842
+
843
+ if traces:
844
+ for trace in traces:
845
+ await trace.send_dns_resolvehost_start(host)
846
+
847
+ res = await self._resolver.resolve(host, port, family=self._family)
848
+
849
+ if traces:
850
+ for trace in traces:
851
+ await trace.send_dns_resolvehost_end(host)
852
+
853
+ return res
854
+
855
+ key = (host, port)
856
+ if key in self._cached_hosts and not self._cached_hosts.expired(key):
857
+ # get result early, before any await (#4014)
858
+ result = self._cached_hosts.next_addrs(key)
859
+
860
+ if traces:
861
+ for trace in traces:
862
+ await trace.send_dns_cache_hit(host)
863
+ return result
864
+
865
+ #
866
+ # If multiple connectors are resolving the same host, we wait
867
+ # for the first one to resolve and then use the result for all of them.
868
+ # We use a throttle event to ensure that we only resolve the host once
869
+ # and then use the result for all the waiters.
870
+ #
871
+ # In this case we need to create a task to ensure that we can shield
872
+ # the task from cancellation as cancelling this lookup should not cancel
873
+ # the underlying lookup or else the cancel event will get broadcast to
874
+ # all the waiters across all connections.
875
+ #
876
+ resolved_host_task = asyncio.create_task(
877
+ self._resolve_host_with_throttle(key, host, port, traces)
878
+ )
879
+ try:
880
+ return await asyncio.shield(resolved_host_task)
881
+ except asyncio.CancelledError:
882
+
883
+ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
884
+ with suppress(Exception, asyncio.CancelledError):
885
+ fut.result()
886
+
887
+ resolved_host_task.add_done_callback(drop_exception)
888
+ raise
889
+
890
+ async def _resolve_host_with_throttle(
891
+ self,
892
+ key: Tuple[str, int],
893
+ host: str,
894
+ port: int,
895
+ traces: Optional[List["Trace"]],
896
+ ) -> List[Dict[str, Any]]:
897
+ """Resolve host with a dns events throttle."""
898
+ if key in self._throttle_dns_events:
899
+ # get event early, before any await (#4014)
900
+ event = self._throttle_dns_events[key]
901
+ if traces:
902
+ for trace in traces:
903
+ await trace.send_dns_cache_hit(host)
904
+ await event.wait()
905
+ else:
906
+ # update dict early, before any await (#4014)
907
+ self._throttle_dns_events[key] = EventResultOrError(self._loop)
908
+ if traces:
909
+ for trace in traces:
910
+ await trace.send_dns_cache_miss(host)
911
+ try:
912
+
913
+ if traces:
914
+ for trace in traces:
915
+ await trace.send_dns_resolvehost_start(host)
916
+
917
+ addrs = await self._resolver.resolve(host, port, family=self._family)
918
+ if traces:
919
+ for trace in traces:
920
+ await trace.send_dns_resolvehost_end(host)
921
+
922
+ self._cached_hosts.add(key, addrs)
923
+ self._throttle_dns_events[key].set()
924
+ except BaseException as e:
925
+ # any DNS exception, independently of the implementation
926
+ # is set for the waiters to raise the same exception.
927
+ self._throttle_dns_events[key].set(exc=e)
928
+ raise
929
+ finally:
930
+ self._throttle_dns_events.pop(key)
931
+
932
+ return self._cached_hosts.next_addrs(key)
933
+
934
+ async def _create_connection(
935
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
936
+ ) -> ResponseHandler:
937
+ """Create connection.
938
+
939
+ Has same keyword arguments as BaseEventLoop.create_connection.
940
+ """
941
+ if req.proxy:
942
+ _, proto = await self._create_proxy_connection(req, traces, timeout)
943
+ else:
944
+ _, proto = await self._create_direct_connection(req, traces, timeout)
945
+
946
+ return proto
947
+
948
+ @staticmethod
949
+ @functools.lru_cache(None)
950
+ def _make_ssl_context(verified: bool) -> SSLContext:
951
+ if verified:
952
+ return ssl.create_default_context()
953
+ else:
954
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
955
+ sslcontext.options |= ssl.OP_NO_SSLv2
956
+ sslcontext.options |= ssl.OP_NO_SSLv3
957
+ sslcontext.check_hostname = False
958
+ sslcontext.verify_mode = ssl.CERT_NONE
959
+ try:
960
+ sslcontext.options |= ssl.OP_NO_COMPRESSION
961
+ except AttributeError as attr_err:
962
+ warnings.warn(
963
+ "{!s}: The Python interpreter is compiled "
964
+ "against OpenSSL < 1.0.0. Ref: "
965
+ "https://docs.python.org/3/library/ssl.html"
966
+ "#ssl.OP_NO_COMPRESSION".format(attr_err),
967
+ )
968
+ sslcontext.set_default_verify_paths()
969
+ return sslcontext
970
+
971
+ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
972
+ """Logic to get the correct SSL context
973
+
974
+ 0. if req.ssl is false, return None
975
+
976
+ 1. if ssl_context is specified in req, use it
977
+ 2. if _ssl_context is specified in self, use it
978
+ 3. otherwise:
979
+ 1. if verify_ssl is not specified in req, use self.ssl_context
980
+ (will generate a default context according to self.verify_ssl)
981
+ 2. if verify_ssl is True in req, generate a default SSL context
982
+ 3. if verify_ssl is False in req, generate a SSL context that
983
+ won't verify
984
+ """
985
+ if req.is_ssl():
986
+ if ssl is None: # pragma: no cover
987
+ raise RuntimeError("SSL is not supported.")
988
+ sslcontext = req.ssl
989
+ if isinstance(sslcontext, ssl.SSLContext):
990
+ return sslcontext
991
+ if sslcontext is not True:
992
+ # not verified or fingerprinted
993
+ return self._make_ssl_context(False)
994
+ sslcontext = self._ssl
995
+ if isinstance(sslcontext, ssl.SSLContext):
996
+ return sslcontext
997
+ if sslcontext is not True:
998
+ # not verified or fingerprinted
999
+ return self._make_ssl_context(False)
1000
+ return self._make_ssl_context(True)
1001
+ else:
1002
+ return None
1003
+
1004
+ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
1005
+ ret = req.ssl
1006
+ if isinstance(ret, Fingerprint):
1007
+ return ret
1008
+ ret = self._ssl
1009
+ if isinstance(ret, Fingerprint):
1010
+ return ret
1011
+ return None
1012
+
1013
+ async def _wrap_create_connection(
1014
+ self,
1015
+ *args: Any,
1016
+ req: ClientRequest,
1017
+ timeout: "ClientTimeout",
1018
+ client_error: Type[Exception] = ClientConnectorError,
1019
+ **kwargs: Any,
1020
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
1021
+ try:
1022
+ async with ceil_timeout(
1023
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1024
+ ):
1025
+ return await self._loop.create_connection(*args, **kwargs)
1026
+ except cert_errors as exc:
1027
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
1028
+ except ssl_errors as exc:
1029
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
1030
+ except OSError as exc:
1031
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1032
+ raise
1033
+ raise client_error(req.connection_key, exc) from exc
1034
+
1035
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
1036
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
1037
+
1038
+ It is necessary for TLS-in-TLS so that it is possible to
1039
+ send HTTPS queries through HTTPS proxies.
1040
+
1041
+ This doesn't affect regular HTTP requests, though.
1042
+ """
1043
+ if not req.is_ssl():
1044
+ return
1045
+
1046
+ proxy_url = req.proxy
1047
+ assert proxy_url is not None
1048
+ if proxy_url.scheme != "https":
1049
+ return
1050
+
1051
+ self._check_loop_for_start_tls()
1052
+
1053
+ def _check_loop_for_start_tls(self) -> None:
1054
+ try:
1055
+ self._loop.start_tls
1056
+ except AttributeError as attr_exc:
1057
+ raise RuntimeError(
1058
+ "An HTTPS request is being sent through an HTTPS proxy. "
1059
+ "This needs support for TLS in TLS but it is not implemented "
1060
+ "in your runtime for the stdlib asyncio.\n\n"
1061
+ "Please upgrade to Python 3.11 or higher. For more details, "
1062
+ "please see:\n"
1063
+ "* https://bugs.python.org/issue37179\n"
1064
+ "* https://github.com/python/cpython/pull/28073\n"
1065
+ "* https://docs.aiohttp.org/en/stable/"
1066
+ "client_advanced.html#proxy-support\n"
1067
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
1068
+ ) from attr_exc
1069
+
1070
+ def _loop_supports_start_tls(self) -> bool:
1071
+ try:
1072
+ self._check_loop_for_start_tls()
1073
+ except RuntimeError:
1074
+ return False
1075
+ else:
1076
+ return True
1077
+
1078
+ def _warn_about_tls_in_tls(
1079
+ self,
1080
+ underlying_transport: asyncio.Transport,
1081
+ req: ClientRequest,
1082
+ ) -> None:
1083
+ """Issue a warning if the requested URL has HTTPS scheme."""
1084
+ if req.request_info.url.scheme != "https":
1085
+ return
1086
+
1087
+ asyncio_supports_tls_in_tls = getattr(
1088
+ underlying_transport,
1089
+ "_start_tls_compatible",
1090
+ False,
1091
+ )
1092
+
1093
+ if asyncio_supports_tls_in_tls:
1094
+ return
1095
+
1096
+ warnings.warn(
1097
+ "An HTTPS request is being sent through an HTTPS proxy. "
1098
+ "This support for TLS in TLS is known to be disabled "
1099
+ "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
1100
+ "an error in the log below.\n\n"
1101
+ "It is possible to enable it via monkeypatching. "
1102
+ "For more details, see:\n"
1103
+ "* https://bugs.python.org/issue37179\n"
1104
+ "* https://github.com/python/cpython/pull/28073\n\n"
1105
+ "You can temporarily patch this as follows:\n"
1106
+ "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
1107
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
1108
+ RuntimeWarning,
1109
+ source=self,
1110
+ # Why `4`? At least 3 of the calls in the stack originate
1111
+ # from the methods in this class.
1112
+ stacklevel=3,
1113
+ )
1114
+
1115
+ async def _start_tls_connection(
1116
+ self,
1117
+ underlying_transport: asyncio.Transport,
1118
+ req: ClientRequest,
1119
+ timeout: "ClientTimeout",
1120
+ client_error: Type[Exception] = ClientConnectorError,
1121
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
1122
+ """Wrap the raw TCP transport with TLS."""
1123
+ tls_proto = self._factory() # Create a brand new proto for TLS
1124
+
1125
+ # Safety of the `cast()` call here is based on the fact that
1126
+ # internally `_get_ssl_context()` only returns `None` when
1127
+ # `req.is_ssl()` evaluates to `False` which is never gonna happen
1128
+ # in this code path. Of course, it's rather fragile
1129
+ # maintainability-wise but this is to be solved separately.
1130
+ sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
1131
+
1132
+ try:
1133
+ async with ceil_timeout(
1134
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1135
+ ):
1136
+ try:
1137
+ tls_transport = await self._loop.start_tls(
1138
+ underlying_transport,
1139
+ tls_proto,
1140
+ sslcontext,
1141
+ server_hostname=req.server_hostname or req.host,
1142
+ ssl_handshake_timeout=timeout.total,
1143
+ )
1144
+ except BaseException:
1145
+ # We need to close the underlying transport since
1146
+ # `start_tls()` probably failed before it had a
1147
+ # chance to do this:
1148
+ underlying_transport.close()
1149
+ raise
1150
+ except cert_errors as exc:
1151
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
1152
+ except ssl_errors as exc:
1153
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
1154
+ except OSError as exc:
1155
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1156
+ raise
1157
+ raise client_error(req.connection_key, exc) from exc
1158
+ except TypeError as type_err:
1159
+ # Example cause looks like this:
1160
+ # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
1161
+ # object at 0x7f760615e460> is not supported by start_tls()
1162
+
1163
+ raise ClientConnectionError(
1164
+ "Cannot initialize a TLS-in-TLS connection to host "
1165
+ f"{req.host!s}:{req.port:d} through an underlying connection "
1166
+ f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
1167
+ f"[{type_err!s}]"
1168
+ ) from type_err
1169
+ else:
1170
+ if tls_transport is None:
1171
+ msg = "Failed to start TLS (possibly caused by closing transport)"
1172
+ raise client_error(req.connection_key, OSError(msg))
1173
+ tls_proto.connection_made(
1174
+ tls_transport
1175
+ ) # Kick the state machine of the new TLS protocol
1176
+
1177
+ return tls_transport, tls_proto
1178
+
1179
+ async def _create_direct_connection(
1180
+ self,
1181
+ req: ClientRequest,
1182
+ traces: List["Trace"],
1183
+ timeout: "ClientTimeout",
1184
+ *,
1185
+ client_error: Type[Exception] = ClientConnectorError,
1186
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
1187
+ sslcontext = self._get_ssl_context(req)
1188
+ fingerprint = self._get_fingerprint(req)
1189
+
1190
+ host = req.url.raw_host
1191
+ assert host is not None
1192
+ # Replace multiple trailing dots with a single one.
1193
+ # A trailing dot is only present for fully-qualified domain names.
1194
+ # See https://github.com/aio-libs/aiohttp/pull/7364.
1195
+ if host.endswith(".."):
1196
+ host = host.rstrip(".") + "."
1197
+ port = req.port
1198
+ assert port is not None
1199
+ try:
1200
+ # Cancelling this lookup should not cancel the underlying lookup
1201
+ # or else the cancel event will get broadcast to all the waiters
1202
+ # across all connections.
1203
+ hosts = await self._resolve_host(host, port, traces=traces)
1204
+ except OSError as exc:
1205
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1206
+ raise
1207
+ # in case of proxy it is not ClientProxyConnectionError
1208
+ # it is problem of resolving proxy ip itself
1209
+ raise ClientConnectorError(req.connection_key, exc) from exc
1210
+
1211
+ last_exc: Optional[Exception] = None
1212
+
1213
+ for hinfo in hosts:
1214
+ host = hinfo["host"]
1215
+ port = hinfo["port"]
1216
+
1217
+ # Strip trailing dots, certificates contain FQDN without dots.
1218
+ # See https://github.com/aio-libs/aiohttp/issues/3636
1219
+ server_hostname = (
1220
+ (req.server_hostname or hinfo["hostname"]).rstrip(".")
1221
+ if sslcontext
1222
+ else None
1223
+ )
1224
+
1225
+ try:
1226
+ transp, proto = await self._wrap_create_connection(
1227
+ self._factory,
1228
+ host,
1229
+ port,
1230
+ timeout=timeout,
1231
+ ssl=sslcontext,
1232
+ family=hinfo["family"],
1233
+ proto=hinfo["proto"],
1234
+ flags=hinfo["flags"],
1235
+ server_hostname=server_hostname,
1236
+ local_addr=self._local_addr,
1237
+ req=req,
1238
+ client_error=client_error,
1239
+ )
1240
+ except ClientConnectorError as exc:
1241
+ last_exc = exc
1242
+ continue
1243
+
1244
+ if req.is_ssl() and fingerprint:
1245
+ try:
1246
+ fingerprint.check(transp)
1247
+ except ServerFingerprintMismatch as exc:
1248
+ transp.close()
1249
+ if not self._cleanup_closed_disabled:
1250
+ self._cleanup_closed_transports.append(transp)
1251
+ last_exc = exc
1252
+ continue
1253
+
1254
+ return transp, proto
1255
+ else:
1256
+ assert last_exc is not None
1257
+ raise last_exc
1258
+
1259
+ async def _create_proxy_connection(
1260
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1261
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
1262
+ self._fail_on_no_start_tls(req)
1263
+ runtime_has_start_tls = self._loop_supports_start_tls()
1264
+
1265
+ headers: Dict[str, str] = {}
1266
+ if req.proxy_headers is not None:
1267
+ headers = req.proxy_headers # type: ignore[assignment]
1268
+ headers[hdrs.HOST] = req.headers[hdrs.HOST]
1269
+
1270
+ url = req.proxy
1271
+ assert url is not None
1272
+ proxy_req = ClientRequest(
1273
+ hdrs.METH_GET,
1274
+ url,
1275
+ headers=headers,
1276
+ auth=req.proxy_auth,
1277
+ loop=self._loop,
1278
+ ssl=req.ssl,
1279
+ )
1280
+
1281
+ # create connection to proxy server
1282
+ transport, proto = await self._create_direct_connection(
1283
+ proxy_req, [], timeout, client_error=ClientProxyConnectionError
1284
+ )
1285
+
1286
+ # Many HTTP proxies has buggy keepalive support. Let's not
1287
+ # reuse connection but close it after processing every
1288
+ # response.
1289
+ proto.force_close()
1290
+
1291
+ auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
1292
+ if auth is not None:
1293
+ if not req.is_ssl():
1294
+ req.headers[hdrs.PROXY_AUTHORIZATION] = auth
1295
+ else:
1296
+ proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
1297
+
1298
+ if req.is_ssl():
1299
+ if runtime_has_start_tls:
1300
+ self._warn_about_tls_in_tls(transport, req)
1301
+
1302
+ # For HTTPS requests over HTTP proxy
1303
+ # we must notify proxy to tunnel connection
1304
+ # so we send CONNECT command:
1305
+ # CONNECT www.python.org:443 HTTP/1.1
1306
+ # Host: www.python.org
1307
+ #
1308
+ # next we must do TLS handshake and so on
1309
+ # to do this we must wrap raw socket into secure one
1310
+ # asyncio handles this perfectly
1311
+ proxy_req.method = hdrs.METH_CONNECT
1312
+ proxy_req.url = req.url
1313
+ key = attr.evolve(
1314
+ req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
1315
+ )
1316
+ conn = Connection(self, key, proto, self._loop)
1317
+ proxy_resp = await proxy_req.send(conn)
1318
+ try:
1319
+ protocol = conn._protocol
1320
+ assert protocol is not None
1321
+
1322
+ # read_until_eof=True will ensure the connection isn't closed
1323
+ # once the response is received and processed allowing
1324
+ # START_TLS to work on the connection below.
1325
+ protocol.set_response_params(
1326
+ read_until_eof=runtime_has_start_tls,
1327
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
1328
+ )
1329
+ resp = await proxy_resp.start(conn)
1330
+ except BaseException:
1331
+ proxy_resp.close()
1332
+ conn.close()
1333
+ raise
1334
+ else:
1335
+ conn._protocol = None
1336
+ conn._transport = None
1337
+ try:
1338
+ if resp.status != 200:
1339
+ message = resp.reason
1340
+ if message is None:
1341
+ message = HTTPStatus(resp.status).phrase
1342
+ raise ClientHttpProxyError(
1343
+ proxy_resp.request_info,
1344
+ resp.history,
1345
+ status=resp.status,
1346
+ message=message,
1347
+ headers=resp.headers,
1348
+ )
1349
+ if not runtime_has_start_tls:
1350
+ rawsock = transport.get_extra_info("socket", default=None)
1351
+ if rawsock is None:
1352
+ raise RuntimeError(
1353
+ "Transport does not expose socket instance"
1354
+ )
1355
+ # Duplicate the socket, so now we can close proxy transport
1356
+ rawsock = rawsock.dup()
1357
+ except BaseException:
1358
+ # It shouldn't be closed in `finally` because it's fed to
1359
+ # `loop.start_tls()` and the docs say not to touch it after
1360
+ # passing there.
1361
+ transport.close()
1362
+ raise
1363
+ finally:
1364
+ if not runtime_has_start_tls:
1365
+ transport.close()
1366
+
1367
+ if not runtime_has_start_tls:
1368
+ # HTTP proxy with support for upgrade to HTTPS
1369
+ sslcontext = self._get_ssl_context(req)
1370
+ return await self._wrap_create_connection(
1371
+ self._factory,
1372
+ timeout=timeout,
1373
+ ssl=sslcontext,
1374
+ sock=rawsock,
1375
+ server_hostname=req.host,
1376
+ req=req,
1377
+ )
1378
+
1379
+ return await self._start_tls_connection(
1380
+ # Access the old transport for the last time before it's
1381
+ # closed and forgotten forever:
1382
+ transport,
1383
+ req=req,
1384
+ timeout=timeout,
1385
+ )
1386
+ finally:
1387
+ proxy_resp.close()
1388
+
1389
+ return transport, proto
1390
+
1391
+
1392
+ class UnixConnector(BaseConnector):
1393
+ """Unix socket connector.
1394
+
1395
+ path - Unix socket path.
1396
+ keepalive_timeout - (optional) Keep-alive timeout.
1397
+ force_close - Set to True to force close and do reconnect
1398
+ after each request (and between redirects).
1399
+ limit - The total number of simultaneous connections.
1400
+ limit_per_host - Number of simultaneous connections to one host.
1401
+ loop - Optional event loop.
1402
+ """
1403
+
1404
+ def __init__(
1405
+ self,
1406
+ path: str,
1407
+ force_close: bool = False,
1408
+ keepalive_timeout: Union[object, float, None] = sentinel,
1409
+ limit: int = 100,
1410
+ limit_per_host: int = 0,
1411
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1412
+ ) -> None:
1413
+ super().__init__(
1414
+ force_close=force_close,
1415
+ keepalive_timeout=keepalive_timeout,
1416
+ limit=limit,
1417
+ limit_per_host=limit_per_host,
1418
+ loop=loop,
1419
+ )
1420
+ self._path = path
1421
+
1422
+ @property
1423
+ def path(self) -> str:
1424
+ """Path to unix socket."""
1425
+ return self._path
1426
+
1427
+ async def _create_connection(
1428
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1429
+ ) -> ResponseHandler:
1430
+ try:
1431
+ async with ceil_timeout(
1432
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1433
+ ):
1434
+ _, proto = await self._loop.create_unix_connection(
1435
+ self._factory, self._path
1436
+ )
1437
+ except OSError as exc:
1438
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1439
+ raise
1440
+ raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
1441
+
1442
+ return proto
1443
+
1444
+
1445
+ class NamedPipeConnector(BaseConnector):
1446
+ """Named pipe connector.
1447
+
1448
+ Only supported by the proactor event loop.
1449
+ See also: https://docs.python.org/3/library/asyncio-eventloop.html
1450
+
1451
+ path - Windows named pipe path.
1452
+ keepalive_timeout - (optional) Keep-alive timeout.
1453
+ force_close - Set to True to force close and do reconnect
1454
+ after each request (and between redirects).
1455
+ limit - The total number of simultaneous connections.
1456
+ limit_per_host - Number of simultaneous connections to one host.
1457
+ loop - Optional event loop.
1458
+ """
1459
+
1460
+ def __init__(
1461
+ self,
1462
+ path: str,
1463
+ force_close: bool = False,
1464
+ keepalive_timeout: Union[object, float, None] = sentinel,
1465
+ limit: int = 100,
1466
+ limit_per_host: int = 0,
1467
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1468
+ ) -> None:
1469
+ super().__init__(
1470
+ force_close=force_close,
1471
+ keepalive_timeout=keepalive_timeout,
1472
+ limit=limit,
1473
+ limit_per_host=limit_per_host,
1474
+ loop=loop,
1475
+ )
1476
+ if not isinstance(
1477
+ self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
1478
+ ):
1479
+ raise RuntimeError(
1480
+ "Named Pipes only available in proactor " "loop under windows"
1481
+ )
1482
+ self._path = path
1483
+
1484
+ @property
1485
+ def path(self) -> str:
1486
+ """Path to the named pipe."""
1487
+ return self._path
1488
+
1489
+ async def _create_connection(
1490
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1491
+ ) -> ResponseHandler:
1492
+ try:
1493
+ async with ceil_timeout(
1494
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1495
+ ):
1496
+ _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
1497
+ self._factory, self._path
1498
+ )
1499
+ # the drain is required so that the connection_made is called
1500
+ # and transport is set otherwise it is not set before the
1501
+ # `assert conn.transport is not None`
1502
+ # in client.py's _request method
1503
+ await asyncio.sleep(0)
1504
+ # other option is to manually set transport like
1505
+ # `proto.transport = trans`
1506
+ except OSError as exc:
1507
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1508
+ raise
1509
+ raise ClientConnectorError(req.connection_key, exc) from exc
1510
+
1511
+ return cast(ResponseHandler, proto)
env-llmeval/lib/python3.10/site-packages/aiohttp/cookiejar.py ADDED
@@ -0,0 +1,419 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import calendar
3
+ import contextlib
4
+ import datetime
5
+ import os # noqa
6
+ import pathlib
7
+ import pickle
8
+ import re
9
+ import time
10
+ from collections import defaultdict
11
+ from http.cookies import BaseCookie, Morsel, SimpleCookie
12
+ from math import ceil
13
+ from typing import ( # noqa
14
+ DefaultDict,
15
+ Dict,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ Mapping,
20
+ Optional,
21
+ Set,
22
+ Tuple,
23
+ Union,
24
+ cast,
25
+ )
26
+
27
+ from yarl import URL
28
+
29
+ from .abc import AbstractCookieJar, ClearCookiePredicate
30
+ from .helpers import is_ip_address
31
+ from .typedefs import LooseCookies, PathLike, StrOrURL
32
+
33
+ __all__ = ("CookieJar", "DummyCookieJar")
34
+
35
+
36
+ CookieItem = Union[str, "Morsel[str]"]
37
+
38
+
39
+ class CookieJar(AbstractCookieJar):
40
+ """Implements cookie storage adhering to RFC 6265."""
41
+
42
+ DATE_TOKENS_RE = re.compile(
43
+ r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
44
+ r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
45
+ )
46
+
47
+ DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
48
+
49
+ DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
50
+
51
+ DATE_MONTH_RE = re.compile(
52
+ "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
53
+ re.I,
54
+ )
55
+
56
+ DATE_YEAR_RE = re.compile(r"(\d{2,4})")
57
+
58
+ # calendar.timegm() fails for timestamps after datetime.datetime.max
59
+ # Minus one as a loss of precision occurs when timestamp() is called.
60
+ MAX_TIME = (
61
+ int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
62
+ )
63
+ try:
64
+ calendar.timegm(time.gmtime(MAX_TIME))
65
+ except (OSError, ValueError):
66
+ # Hit the maximum representable time on Windows
67
+ # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
68
+ # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere
69
+ MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
70
+ except OverflowError:
71
+ # #4515: datetime.max may not be representable on 32-bit platforms
72
+ MAX_TIME = 2**31 - 1
73
+ # Avoid minuses in the future, 3x faster
74
+ SUB_MAX_TIME = MAX_TIME - 1
75
+
76
+ def __init__(
77
+ self,
78
+ *,
79
+ unsafe: bool = False,
80
+ quote_cookie: bool = True,
81
+ treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
82
+ loop: Optional[asyncio.AbstractEventLoop] = None,
83
+ ) -> None:
84
+ super().__init__(loop=loop)
85
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
86
+ SimpleCookie
87
+ )
88
+ self._host_only_cookies: Set[Tuple[str, str]] = set()
89
+ self._unsafe = unsafe
90
+ self._quote_cookie = quote_cookie
91
+ if treat_as_secure_origin is None:
92
+ treat_as_secure_origin = []
93
+ elif isinstance(treat_as_secure_origin, URL):
94
+ treat_as_secure_origin = [treat_as_secure_origin.origin()]
95
+ elif isinstance(treat_as_secure_origin, str):
96
+ treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
97
+ else:
98
+ treat_as_secure_origin = [
99
+ URL(url).origin() if isinstance(url, str) else url.origin()
100
+ for url in treat_as_secure_origin
101
+ ]
102
+ self._treat_as_secure_origin = treat_as_secure_origin
103
+ self._next_expiration: float = ceil(time.time())
104
+ self._expirations: Dict[Tuple[str, str, str], float] = {}
105
+
106
+ def save(self, file_path: PathLike) -> None:
107
+ file_path = pathlib.Path(file_path)
108
+ with file_path.open(mode="wb") as f:
109
+ pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
110
+
111
+ def load(self, file_path: PathLike) -> None:
112
+ file_path = pathlib.Path(file_path)
113
+ with file_path.open(mode="rb") as f:
114
+ self._cookies = pickle.load(f)
115
+
116
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
117
+ if predicate is None:
118
+ self._next_expiration = ceil(time.time())
119
+ self._cookies.clear()
120
+ self._host_only_cookies.clear()
121
+ self._expirations.clear()
122
+ return
123
+
124
+ to_del = []
125
+ now = time.time()
126
+ for (domain, path), cookie in self._cookies.items():
127
+ for name, morsel in cookie.items():
128
+ key = (domain, path, name)
129
+ if (
130
+ key in self._expirations and self._expirations[key] <= now
131
+ ) or predicate(morsel):
132
+ to_del.append(key)
133
+
134
+ for domain, path, name in to_del:
135
+ self._host_only_cookies.discard((domain, name))
136
+ key = (domain, path, name)
137
+ if key in self._expirations:
138
+ del self._expirations[(domain, path, name)]
139
+ self._cookies[(domain, path)].pop(name, None)
140
+
141
+ self._next_expiration = (
142
+ min(*self._expirations.values(), self.SUB_MAX_TIME) + 1
143
+ if self._expirations
144
+ else self.MAX_TIME
145
+ )
146
+
147
+ def clear_domain(self, domain: str) -> None:
148
+ self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
149
+
150
+ def __iter__(self) -> "Iterator[Morsel[str]]":
151
+ self._do_expiration()
152
+ for val in self._cookies.values():
153
+ yield from val.values()
154
+
155
+ def __len__(self) -> int:
156
+ return sum(1 for i in self)
157
+
158
+ def _do_expiration(self) -> None:
159
+ self.clear(lambda x: False)
160
+
161
+ def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
162
+ self._next_expiration = min(self._next_expiration, when)
163
+ self._expirations[(domain, path, name)] = when
164
+
165
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
166
+ """Update cookies."""
167
+ hostname = response_url.raw_host
168
+
169
+ if not self._unsafe and is_ip_address(hostname):
170
+ # Don't accept cookies from IPs
171
+ return
172
+
173
+ if isinstance(cookies, Mapping):
174
+ cookies = cookies.items()
175
+
176
+ for name, cookie in cookies:
177
+ if not isinstance(cookie, Morsel):
178
+ tmp = SimpleCookie()
179
+ tmp[name] = cookie # type: ignore[assignment]
180
+ cookie = tmp[name]
181
+
182
+ domain = cookie["domain"]
183
+
184
+ # ignore domains with trailing dots
185
+ if domain.endswith("."):
186
+ domain = ""
187
+ del cookie["domain"]
188
+
189
+ if not domain and hostname is not None:
190
+ # Set the cookie's domain to the response hostname
191
+ # and set its host-only-flag
192
+ self._host_only_cookies.add((hostname, name))
193
+ domain = cookie["domain"] = hostname
194
+
195
+ if domain.startswith("."):
196
+ # Remove leading dot
197
+ domain = domain[1:]
198
+ cookie["domain"] = domain
199
+
200
+ if hostname and not self._is_domain_match(domain, hostname):
201
+ # Setting cookies for different domains is not allowed
202
+ continue
203
+
204
+ path = cookie["path"]
205
+ if not path or not path.startswith("/"):
206
+ # Set the cookie's path to the response path
207
+ path = response_url.path
208
+ if not path.startswith("/"):
209
+ path = "/"
210
+ else:
211
+ # Cut everything from the last slash to the end
212
+ path = "/" + path[1 : path.rfind("/")]
213
+ cookie["path"] = path
214
+
215
+ max_age = cookie["max-age"]
216
+ if max_age:
217
+ try:
218
+ delta_seconds = int(max_age)
219
+ max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
220
+ self._expire_cookie(max_age_expiration, domain, path, name)
221
+ except ValueError:
222
+ cookie["max-age"] = ""
223
+
224
+ else:
225
+ expires = cookie["expires"]
226
+ if expires:
227
+ expire_time = self._parse_date(expires)
228
+ if expire_time:
229
+ self._expire_cookie(expire_time, domain, path, name)
230
+ else:
231
+ cookie["expires"] = ""
232
+
233
+ self._cookies[(domain, path)][name] = cookie
234
+
235
+ self._do_expiration()
236
+
237
+ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
238
+ """Returns this jar's cookies filtered by their attributes."""
239
+ filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
240
+ SimpleCookie() if self._quote_cookie else BaseCookie()
241
+ )
242
+ if not self._cookies:
243
+ # Skip do_expiration() if there are no cookies.
244
+ return filtered
245
+ self._do_expiration()
246
+ if not self._cookies:
247
+ # Skip rest of function if no non-expired cookies.
248
+ return filtered
249
+ request_url = URL(request_url)
250
+ hostname = request_url.raw_host or ""
251
+
252
+ is_not_secure = request_url.scheme not in ("https", "wss")
253
+ if is_not_secure and self._treat_as_secure_origin:
254
+ request_origin = URL()
255
+ with contextlib.suppress(ValueError):
256
+ request_origin = request_url.origin()
257
+ is_not_secure = request_origin not in self._treat_as_secure_origin
258
+
259
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
260
+ for cookie in sorted(self, key=lambda c: len(c["path"])):
261
+ name = cookie.key
262
+ domain = cookie["domain"]
263
+
264
+ # Send shared cookies
265
+ if not domain:
266
+ filtered[name] = cookie.value
267
+ continue
268
+
269
+ if not self._unsafe and is_ip_address(hostname):
270
+ continue
271
+
272
+ if (domain, name) in self._host_only_cookies:
273
+ if domain != hostname:
274
+ continue
275
+ elif not self._is_domain_match(domain, hostname):
276
+ continue
277
+
278
+ if not self._is_path_match(request_url.path, cookie["path"]):
279
+ continue
280
+
281
+ if is_not_secure and cookie["secure"]:
282
+ continue
283
+
284
+ # It's critical we use the Morsel so the coded_value
285
+ # (based on cookie version) is preserved
286
+ mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
287
+ mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
288
+ filtered[name] = mrsl_val
289
+
290
+ return filtered
291
+
292
+ @staticmethod
293
+ def _is_domain_match(domain: str, hostname: str) -> bool:
294
+ """Implements domain matching adhering to RFC 6265."""
295
+ if hostname == domain:
296
+ return True
297
+
298
+ if not hostname.endswith(domain):
299
+ return False
300
+
301
+ non_matching = hostname[: -len(domain)]
302
+
303
+ if not non_matching.endswith("."):
304
+ return False
305
+
306
+ return not is_ip_address(hostname)
307
+
308
+ @staticmethod
309
+ def _is_path_match(req_path: str, cookie_path: str) -> bool:
310
+ """Implements path matching adhering to RFC 6265."""
311
+ if not req_path.startswith("/"):
312
+ req_path = "/"
313
+
314
+ if req_path == cookie_path:
315
+ return True
316
+
317
+ if not req_path.startswith(cookie_path):
318
+ return False
319
+
320
+ if cookie_path.endswith("/"):
321
+ return True
322
+
323
+ non_matching = req_path[len(cookie_path) :]
324
+
325
+ return non_matching.startswith("/")
326
+
327
+ @classmethod
328
+ def _parse_date(cls, date_str: str) -> Optional[int]:
329
+ """Implements date string parsing adhering to RFC 6265."""
330
+ if not date_str:
331
+ return None
332
+
333
+ found_time = False
334
+ found_day = False
335
+ found_month = False
336
+ found_year = False
337
+
338
+ hour = minute = second = 0
339
+ day = 0
340
+ month = 0
341
+ year = 0
342
+
343
+ for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
344
+
345
+ token = token_match.group("token")
346
+
347
+ if not found_time:
348
+ time_match = cls.DATE_HMS_TIME_RE.match(token)
349
+ if time_match:
350
+ found_time = True
351
+ hour, minute, second = (int(s) for s in time_match.groups())
352
+ continue
353
+
354
+ if not found_day:
355
+ day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
356
+ if day_match:
357
+ found_day = True
358
+ day = int(day_match.group())
359
+ continue
360
+
361
+ if not found_month:
362
+ month_match = cls.DATE_MONTH_RE.match(token)
363
+ if month_match:
364
+ found_month = True
365
+ assert month_match.lastindex is not None
366
+ month = month_match.lastindex
367
+ continue
368
+
369
+ if not found_year:
370
+ year_match = cls.DATE_YEAR_RE.match(token)
371
+ if year_match:
372
+ found_year = True
373
+ year = int(year_match.group())
374
+
375
+ if 70 <= year <= 99:
376
+ year += 1900
377
+ elif 0 <= year <= 69:
378
+ year += 2000
379
+
380
+ if False in (found_day, found_month, found_year, found_time):
381
+ return None
382
+
383
+ if not 1 <= day <= 31:
384
+ return None
385
+
386
+ if year < 1601 or hour > 23 or minute > 59 or second > 59:
387
+ return None
388
+
389
+ return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
390
+
391
+
392
+ class DummyCookieJar(AbstractCookieJar):
393
+ """Implements a dummy cookie storage.
394
+
395
+ It can be used with the ClientSession when no cookie processing is needed.
396
+
397
+ """
398
+
399
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
400
+ super().__init__(loop=loop)
401
+
402
+ def __iter__(self) -> "Iterator[Morsel[str]]":
403
+ while False:
404
+ yield None
405
+
406
+ def __len__(self) -> int:
407
+ return 0
408
+
409
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
410
+ pass
411
+
412
+ def clear_domain(self, domain: str) -> None:
413
+ pass
414
+
415
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
416
+ pass
417
+
418
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
419
+ return SimpleCookie()
env-llmeval/lib/python3.10/site-packages/aiohttp/formdata.py ADDED
@@ -0,0 +1,182 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+ import warnings
3
+ from typing import Any, Iterable, List, Optional
4
+ from urllib.parse import urlencode
5
+
6
+ from multidict import MultiDict, MultiDictProxy
7
+
8
+ from . import hdrs, multipart, payload
9
+ from .helpers import guess_filename
10
+ from .payload import Payload
11
+
12
+ __all__ = ("FormData",)
13
+
14
+
15
class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields: List[Any] = []  # (type_options, headers, value) triples
        self._is_multipart = False  # flips True once any field needs multipart
        self._is_processed = False  # the multipart writer may be built only once
        self._quote_fields = quote_fields
        self._charset = charset

        # Accept a mapping, a single field, or an iterable of fields.
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        """True when the form must be encoded as multipart/form-data."""
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:
        """Add a single field to the form.

        A file-like value, an explicit filename/content_type, or a
        content_transfer_encoding all force multipart encoding.
        Raises TypeError when filename/content_type/CTE are not str.
        """
        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            msg = (
                "In v4, passing bytes will no longer create a file field. "
                "Please explicitly use the filename parameter or pass a BytesIO object."
            )
            if filename is None and content_transfer_encoding is None:
                # Legacy behavior: bare bytes become a file field named
                # after the form field.  Deprecated; scheduled for v4.
                warnings.warn(msg, DeprecationWarning)
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. " "Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            # Derive the filename from the file object when possible.
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. " "Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            msg = (
                "content_transfer_encoding is deprecated. "
                "To maintain compatibility with v4 please pass a BytesPayload."
            )
            warnings.warn(msg, DeprecationWarning)
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add multiple fields: file objects, multidicts, or (name, value) pairs."""
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                # Bare file object: field name guessed from the file name.
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten a multidict into individual (name, value) pairs.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        # The charset parameter is omitted for the default UTF-8 encoding.
        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            # The writer accumulates parts; re-running would duplicate them.
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        """Materialize the form as a Payload, choosing the encoding lazily."""
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
env-llmeval/lib/python3.10/site-packages/aiohttp/hdrs.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Headers constants."""
2
+
3
+ # After changing the file content call ./tools/gen.py
4
+ # to regenerate the headers parser
5
+ from typing import Final, Set
6
+
7
+ from multidict import istr
8
+
9
# HTTP request method names, plus the "*" wildcard used by the router.
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

# All concrete methods above; deliberately excludes the METH_ANY wildcard.
METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}

# Well-known header names as case-insensitive strings (multidict.istr), so
# lookups in case-insensitive header multidicts need no extra normalization.
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
# NOTE: spelled "Etag" here; istr comparison is case-insensitive anyway.
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
env-llmeval/lib/python3.10/site-packages/aiohttp/helpers.py ADDED
@@ -0,0 +1,1029 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Various helper functions"""
2
+
3
+ import asyncio
4
+ import base64
5
+ import binascii
6
+ import contextlib
7
+ import datetime
8
+ import enum
9
+ import functools
10
+ import inspect
11
+ import netrc
12
+ import os
13
+ import platform
14
+ import re
15
+ import sys
16
+ import time
17
+ import warnings
18
+ import weakref
19
+ from collections import namedtuple
20
+ from contextlib import suppress
21
+ from email.parser import HeaderParser
22
+ from email.utils import parsedate
23
+ from math import ceil
24
+ from pathlib import Path
25
+ from types import TracebackType
26
+ from typing import (
27
+ Any,
28
+ Callable,
29
+ ContextManager,
30
+ Dict,
31
+ Generator,
32
+ Generic,
33
+ Iterable,
34
+ Iterator,
35
+ List,
36
+ Mapping,
37
+ Optional,
38
+ Pattern,
39
+ Protocol,
40
+ Tuple,
41
+ Type,
42
+ TypeVar,
43
+ Union,
44
+ get_args,
45
+ overload,
46
+ )
47
+ from urllib.parse import quote
48
+ from urllib.request import getproxies, proxy_bypass
49
+
50
+ import attr
51
+ from multidict import MultiDict, MultiDictProxy, MultiMapping
52
+ from yarl import URL
53
+
54
+ from . import hdrs
55
+ from .log import client_logger, internal_logger
56
+
57
+ if sys.version_info >= (3, 11):
58
+ import asyncio as async_timeout
59
+ else:
60
+ import async_timeout
61
+
62
+ __all__ = ("BasicAuth", "ChainMapProxy", "ETag")
63
+
64
+ IS_MACOS = platform.system() == "Darwin"
65
+ IS_WINDOWS = platform.system() == "Windows"
66
+
67
+ PY_310 = sys.version_info >= (3, 10)
68
+ PY_311 = sys.version_info >= (3, 11)
69
+
70
+
71
+ _T = TypeVar("_T")
72
+ _S = TypeVar("_S")
73
+
74
+ _SENTINEL = enum.Enum("_SENTINEL", "sentinel")
75
+ sentinel = _SENTINEL.sentinel
76
+
77
+ NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
78
+
79
+ DEBUG = sys.flags.dev_mode or (
80
+ not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
81
+ )
82
+
83
+
84
+ CHAR = {chr(i) for i in range(0, 128)}
85
+ CTL = {chr(i) for i in range(0, 32)} | {
86
+ chr(127),
87
+ }
88
+ SEPARATORS = {
89
+ "(",
90
+ ")",
91
+ "<",
92
+ ">",
93
+ "@",
94
+ ",",
95
+ ";",
96
+ ":",
97
+ "\\",
98
+ '"',
99
+ "/",
100
+ "[",
101
+ "]",
102
+ "?",
103
+ "=",
104
+ "{",
105
+ "}",
106
+ " ",
107
+ chr(9),
108
+ }
109
+ TOKEN = CHAR ^ CTL ^ SEPARATORS
110
+
111
+
112
class noop:
    """Awaitable that completes immediately after yielding control once."""

    def __await__(self) -> Generator[None, None, None]:
        yield
115
+
116
+
117
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject values that would make a malformed Authorization header.
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        # The login must not contain the credential separator.
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        split = auth_header.split(" ", 1)
        if len(split) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, encoded_credentials = split

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            raw = base64.b64decode(encoded_credentials.encode("ascii"), validate=True)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")
        decoded = raw.decode(encoding)

        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        if ":" not in decoded:
            raise ValueError("Invalid credentials.")
        username, password = decoded.split(":", 1)

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
176
+
177
+
178
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL and its embedded BasicAuth.

    Returns ``(url, None)`` when the URL carries no userinfo.
    """
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    return url.with_user(None), auth
184
+
185
+
186
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        # Explicit override via environment variable takes precedence.
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows convention names the file "_netrc" rather than ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
227
+
228
+
229
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    # Proxy endpoint URL paired with optional basic-auth credentials,
    # as collected from the environment and/or .netrc.
    proxy: URL
    proxy_auth: Optional[BasicAuth]
233
+
234
+
235
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
        entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    if login or account is None:
        username = login
    else:
        username = account

    # TODO(PY311): password will always be a string from 3.11 on.
    return BasicAuth(username, password if password is not None else "")
262
+
263
+
264
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect per-scheme proxy settings from the process environment.

    Reads the standard ``*_proxy`` variables via ``urllib.request.getproxies``
    and, when a proxy URL carries no credentials, tries to fill them in from
    ``.netrc``.  Proxies whose own endpoint is https/wss are skipped with a
    warning because TLS to the proxy itself is not supported.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    # Separate embedded userinfo from each proxy URL.
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    # No matching .netrc entry: proceed unauthenticated.
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
288
+
289
+
290
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Return the currently running task; thin wrapper over asyncio.

    The ``loop`` argument is forwarded to :func:`asyncio.current_task`
    for backward compatibility.
    """
    return asyncio.current_task(loop=loop)
294
+
295
+
296
def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop* or the thread's event loop, warning when it isn't running.

    aiohttp objects are expected to be created from within a coroutine; a
    non-running loop usually indicates construction outside async context,
    hence the DeprecationWarning (and a stack trace in debug mode).
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop
312
+
313
+
314
def isasyncgenfunction(obj: Any) -> bool:
    """Return True if *obj* is an async generator function.

    Falls back to False on interpreters that lack
    ``inspect.isasyncgenfunction``.
    """
    checker = getattr(inspect, "isasyncgenfunction", None)
    if checker is None:
        return False
    return checker(obj)  # type: ignore[no-any-return]
320
+
321
+
322
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    # Honor the platform's proxy-bypass rules (no_proxy, registry, etc.).
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    proxies_in_env = proxies_from_env()
    if url.scheme not in proxies_in_env:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    proxy_info = proxies_in_env[url.scheme]
    return proxy_info.proxy, proxy_info.proxy_auth
334
+
335
+
336
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    # Parsed pieces of a MIME type string such as "text/html+xml; charset=utf-8".
    type: str  # major type, e.g. "text"
    subtype: str  # subtype, e.g. "html"
    suffix: str  # structured-syntax suffix after "+", e.g. "xml"
    parameters: "MultiDictProxy[str]"  # lower-cased parameter map
342
+
343
+
344
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        # Empty input parses to an all-empty MimeType.
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    head, *raw_params = mimetype.split(";")

    # Collect "key=value" parameters; keys are lower-cased, values are
    # stripped of surrounding quotes/whitespace.
    params: MultiDict[str] = MultiDict()
    for item in raw_params:
        if not item:
            continue
        key, _, value = item.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == "*":
        # Bare "*" is shorthand for "*/*".
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
382
+
383
+
384
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Return the basename of *obj*'s ``name`` attribute, else *default*.

    Pseudo-names such as ``"<stdin>"`` (angle-bracketed) are ignored.
    """
    name = getattr(obj, "name", None)
    if not (isinstance(name, str) and name):
        return default
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
389
+
390
+
391
# Characters that are NOT valid qtext in an RFC 5322 quoted-string
# (anything outside \041, \043-\133, \135-\176 must be backslash-escaped).
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# Printable US-ASCII plus TAB: the characters permitted inside a quoted-string.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
393
+
394
+
395
def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # QCONTENT must be a strict superset of the content's character set.
    if not QCONTENT > set(content):
        raise ValueError(f"bad content for quoted-string {content!r}")
    # Backslash-escape every character that is not valid qtext.
    return not_qtext_re.sub(lambda match: "\\" + match.group(0), content)
406
+
407
+
408
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # The disposition type itself must be a valid HTTP token.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must also be valid tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # filename stays a plain quoted-string but is
                    # percent-encoded (RFC 7578 practice).
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        qval = quoted_string(val)
                    except ValueError:
                        # Value is not representable as 7-bit quoted-string:
                        # fall back to the RFC 2231/5987 extended "key*" form.
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # Recipient accepts 8-bit values: only escape backslash/quote.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
459
+
460
+
461
class _TSelf(Protocol, Generic[_T]):
    # Structural type for ``reify``: any object carrying a ``_cache`` dict.
    _cache: Dict[str, _T]
463
+
464
+
465
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                # Fast path: the value was already computed and cached.
                return inst._cache[self.name]
            except KeyError:
                # First access: compute, cache on the instance, return.
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            # ``inst._cache`` missing.  When accessed on the class itself
            # (inst is None) return the descriptor; otherwise re-raise.
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        # Data-descriptor write protection: cached values are immutable.
        raise AttributeError("reified property is read-only")
495
+
496
+
497
# Keep the pure-Python descriptor importable under a stable name, then
# prefer the C implementation from ._helpers unless extensions are disabled.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    # C extension not built; ``reify`` stays bound to the Python version.
    pass
506
+
507
# Anchored patterns matching dotted-quad IPv4 literals and full, compressed,
# or IPv4-mapped IPv6 literals; compiled once in str and bytes flavors below.
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
525
+
526
+
527
def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    """Match *host* against the str or bytes pattern, depending on its type.

    ``None`` is never an address; non-str/bytes inputs raise TypeError.
    """
    if host is None:
        return False
    if isinstance(host, str):
        return regex.match(host) is not None
    if isinstance(host, (bytes, bytearray, memoryview)):
        return regexb.match(host) is not None
    raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
538
+
539
+
540
# Convenience predicates binding the IPv4/IPv6 str and bytes patterns.
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
542
+
543
+
544
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """Return True when *host* is an IPv4 or IPv6 address literal."""
    if is_ipv4_address(host):
        return True
    return is_ipv6_address(host)
546
+
547
+
548
# Cache for rfc822_formatted_time(): the formatted Date string is rebuilt
# at most once per wall-clock second.
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""
550
+
551
+
552
def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted for a Date header (RFC 822 style).

    The result is cached and recomputed at most once per second, since the
    header carries only one-second resolution.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime
590
+
591
+
592
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    """Call method *name* on the weakly-referenced object, if still alive."""
    ref, name = info
    obj = ref()
    if obj is None:
        # Referent already collected; nothing to do.
        return
    # The referent may be mid-teardown: swallow any error from the callback.
    with suppress(Exception):
        getattr(obj, name)()
598
+
599
+
600
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` after *timeout* seconds without keeping *ob* alive.

    Returns the timer handle, or None when no positive timeout was given.
    Deadlines at/above *timeout_ceil_threshold* are rounded up to a whole
    loop-clock second.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    if timeout >= timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
614
+
615
+
616
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds; None when no timeout is set."""
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # NOTE(review): strict ">" here vs ">=" in weakref_handle/TimeoutHandle —
    # presumably equivalent in practice, but worth confirming upstream.
    if timeout > timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, cb)
628
+
629
+
630
class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # (callback, args, kwargs) triples fired when the timeout expires.
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add a callback to run when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop every registered callback."""
        del self._callbacks[:]

    def start(self) -> Optional[asyncio.Handle]:
        """Schedule this handle on the loop; None when no timeout is set."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        when = self._loop.time() + timeout
        if timeout >= self._ceil_threshold:
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context bound to this handle (no-op if no timeout)."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        ctx = TimerContext(self._loop)
        self.register(ctx.timeout)
        return ctx

    def __call__(self) -> None:
        # Fire all callbacks, ignoring individual failures, then reset.
        for callback, args, kwargs in self._callbacks:
            with suppress(Exception):
                callback(*args, **kwargs)
        del self._callbacks[:]
678
+
679
+
680
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    """Base timer context; the default assert_timeout is a no-op."""

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
683
+
684
+
685
class TimerNoop(BaseTimerContext):
    """Timer context that never times out and never cancels anything."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return None
696
+
697
+
698
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; all are cancelled on timeout().
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        # Entering after the timer already fired raises immediately.
        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        # Translate the cancellation caused by timeout() into TimeoutError.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        # Idempotent: only the first call cancels the tracked tasks.
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
744
+
745
+
746
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an async_timeout context for *delay* seconds.

    Deadlines longer than *ceil_threshold* are rounded up to a whole
    loop-clock second so nearby timers can share a wakeup; a None or
    non-positive *delay* yields a timeout that never fires.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    now = loop.time()
    when = now + delay
    if delay > ceil_threshold:
        when = ceil(when)
    return async_timeout.timeout_at(when)
758
+
759
+
760
class HeadersMixin:
    """Mixin adding parsed Content-Type/charset/Content-Length accessors on
    top of a ``_headers`` multidict supplied by the subclass."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _headers: MultiMapping[str]

    # Cached parse of the Content-Type header.  _stored_content_type records
    # the raw header value the cache was computed from (sentinel = unparsed).
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        """Parse *raw* (a Content-Type header value) into the cache fields."""
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params(())
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)

        if content_length is not None:
            return int(content_length)
        else:
            return None
806
+
807
+
808
+ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
809
+ if not fut.done():
810
+ fut.set_result(result)
811
+
812
+
813
# Sentinel default for set_exception()'s exc_cause meaning "no cause given".
_EXC_SENTINEL = BaseException()
814
+
815
+
816
class ErrorableProtocol(Protocol):
    """Structural type for objects exposing set_exception(exc, exc_cause)."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None:
        ...  # pragma: no cover
823
+
824
+
825
def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
        Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when one was actually supplied and it is not
    # the exception itself.
    if exc_cause is not _EXC_SENTINEL and exc_cause is not exc:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
846
+
847
+
848
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        # Bug fix: the original left ``module`` unbound (NameError) when no
        # "<module>" frame exists on the stack (e.g. exec()/embedded use);
        # fall back to a placeholder instead.
        module: str = "<unknown>"
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        # total_ordering derives the remaining comparisons from this + __eq__.
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
893
+
894
+
895
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over a sequence of mappings; earlier mappings win."""

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T:
        ...

    @overload
    def __getitem__(self, key: str) -> Any:
        ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        # First mapping that holds the key wins.
        for layer in self._maps:
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]:
        ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
        ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any:
        ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        seen = set()
        for layer in self._maps:
            seen.update(layer)
        return len(seen)

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        merged: Dict[Union[str, AppKey[Any]], Any] = {}
        # reuses stored hash values if possible; later update keeps the
        # iteration order consistent with lookup priority.
        for layer in reversed(self._maps):
            merged.update(layer)
        return iter(merged)

    def __contains__(self, key: object) -> bool:
        for layer in self._maps:
            if key in layer:
                return True
        return False

    def __bool__(self) -> bool:
        for layer in self._maps:
            if layer:
                return True
        return False

    def __repr__(self) -> str:
        content = ", ".join(repr(layer) for layer in self._maps)
        return f"ChainMapProxy({content})"
961
+
962
+
963
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc: "!" plus the visible ASCII range \x23-\x7E and \x80-\xff (no '"').
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# Optional weakness prefix W/ followed by a quoted etag value.
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated list of quoted etags; the trailing (.) alternative captures
# any stray character so callers can detect malformed input.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard etag ("*") that matches any representation.
ETAG_ANY = "*"
971
+
972
+
973
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    # value: the opaque etag payload (without the surrounding quotes)
    # is_weak: True when the etag carried the W/ weakness prefix
    value: str
    is_weak: bool = False
977
+
978
+
979
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is "*" or a valid etagc string."""
    if value == ETAG_ANY:
        return
    if _ETAGC_RE.fullmatch(value) is None:
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
984
+
985
+
986
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is None:
        return None
    parsed = parsedate(date_str)
    if parsed is None:
        return None
    try:
        # parsedate yields a 9-tuple; the first six are Y/M/D H:M:S.
        return datetime.datetime(*parsed[:6], tzinfo=datetime.timezone.utc)
    except ValueError:
        return None
994
+
995
+
996
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if status_code_must_be_empty_body(code):
        return True
    if method_must_be_empty_body(method):
        return True
    # Successful CONNECT switches to tunnelling and carries no body.
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
1003
+
1004
+
1005
def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    return hdrs.METH_HEAD == method.upper()
1010
+
1011
+
1012
def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    if 100 <= code < 200:
        return True
    return code == 204 or code == 304
1016
+
1017
+
1018
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in (204, 304) or 100 <= code < 200:
        return True
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
env-llmeval/lib/python3.10/site-packages/aiohttp/http.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from http import HTTPStatus
3
+ from typing import Mapping, Tuple
4
+
5
+ from . import __version__
6
+ from .http_exceptions import HttpProcessingError as HttpProcessingError
7
+ from .http_parser import (
8
+ HeadersParser as HeadersParser,
9
+ HttpParser as HttpParser,
10
+ HttpRequestParser as HttpRequestParser,
11
+ HttpResponseParser as HttpResponseParser,
12
+ RawRequestMessage as RawRequestMessage,
13
+ RawResponseMessage as RawResponseMessage,
14
+ )
15
+ from .http_websocket import (
16
+ WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
17
+ WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
18
+ WS_KEY as WS_KEY,
19
+ WebSocketError as WebSocketError,
20
+ WebSocketReader as WebSocketReader,
21
+ WebSocketWriter as WebSocketWriter,
22
+ WSCloseCode as WSCloseCode,
23
+ WSMessage as WSMessage,
24
+ WSMsgType as WSMsgType,
25
+ ws_ext_gen as ws_ext_gen,
26
+ ws_ext_parse as ws_ext_parse,
27
+ )
28
+ from .http_writer import (
29
+ HttpVersion as HttpVersion,
30
+ HttpVersion10 as HttpVersion10,
31
+ HttpVersion11 as HttpVersion11,
32
+ StreamWriter as StreamWriter,
33
+ )
34
+
35
+ __all__ = (
36
+ "HttpProcessingError",
37
+ "RESPONSES",
38
+ "SERVER_SOFTWARE",
39
+ # .http_writer
40
+ "StreamWriter",
41
+ "HttpVersion",
42
+ "HttpVersion10",
43
+ "HttpVersion11",
44
+ # .http_parser
45
+ "HeadersParser",
46
+ "HttpParser",
47
+ "HttpRequestParser",
48
+ "HttpResponseParser",
49
+ "RawRequestMessage",
50
+ "RawResponseMessage",
51
+ # .http_websocket
52
+ "WS_CLOSED_MESSAGE",
53
+ "WS_CLOSING_MESSAGE",
54
+ "WS_KEY",
55
+ "WebSocketReader",
56
+ "WebSocketWriter",
57
+ "ws_ext_gen",
58
+ "ws_ext_parse",
59
+ "WSMessage",
60
+ "WebSocketError",
61
+ "WSMsgType",
62
+ "WSCloseCode",
63
+ )
64
+
65
+
66
+ SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
67
+ sys.version_info, __version__
68
+ )
69
+
70
+ RESPONSES: Mapping[int, Tuple[str, str]] = {
71
+ v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
72
+ }
env-llmeval/lib/python3.10/site-packages/aiohttp/http_exceptions.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Low-level http related exceptions."""
2
+
3
+
4
+ from textwrap import indent
5
+ from typing import Optional, Union
6
+
7
+ from .typedefs import _CIMultiDict
8
+
9
+ __all__ = ("HttpProcessingError",)
10
+
11
+
12
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; instances may override via __init__.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        self.message = message
        self.headers = headers
        if code is not None:
            self.code = code

    def __str__(self) -> str:
        body = indent(self.message, " ")
        return f"{self.code}, message:\n{body}"

    def __repr__(self) -> str:
        return f"<{type(self).__name__}: {self.code}, message={self.message!r}>"
44
+
45
+
46
class BadHttpMessage(HttpProcessingError):
    """Base 400-level protocol violation error."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Expose the message through BaseException.args as well.
        self.args = (message,)
54
+
55
+
56
class HttpBadRequest(BadHttpMessage):
    # Same defaults as BadHttpMessage; kept as a distinct exception type.

    code = 400
    message = "Bad Request"


class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data for satisfy content length header."""
76
+
77
+
78
class LineTooLong(BadHttpMessage):
    """Raised when a start line or header exceeds its configured limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        message = f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        super().__init__(message)
        self.args = (line, limit, actual_size)
86
+
87
+
88
class InvalidHeader(BadHttpMessage):
    """Raised for a malformed HTTP header name or value."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        if isinstance(hdr, bytes):
            self.hdr = hdr.decode(errors="backslashreplace")
        else:
            self.hdr = hdr
        self.args = (hdr,)
94
+
95
+
96
class BadStatusLine(BadHttpMessage):
    """Raised when the HTTP start line cannot be parsed."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        if not isinstance(line, str):
            line = repr(line)  # tolerate bytes/other callers
        message = error or f"Bad status line {line!r}"
        super().__init__(message)
        self.line = line
        self.args = (line,)
103
+
104
+
105
class InvalidURLError(BadHttpMessage):
    # Raised when the request target/URL cannot be parsed.
    pass
env-llmeval/lib/python3.10/site-packages/aiohttp/http_parser.py ADDED
@@ -0,0 +1,1041 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ import asyncio
3
+ import re
4
+ import string
5
+ from contextlib import suppress
6
+ from enum import IntEnum
7
+ from typing import (
8
+ Any,
9
+ ClassVar,
10
+ Final,
11
+ Generic,
12
+ List,
13
+ Literal,
14
+ NamedTuple,
15
+ Optional,
16
+ Pattern,
17
+ Set,
18
+ Tuple,
19
+ Type,
20
+ TypeVar,
21
+ Union,
22
+ )
23
+
24
+ from multidict import CIMultiDict, CIMultiDictProxy, istr
25
+ from yarl import URL
26
+
27
+ from . import hdrs
28
+ from .base_protocol import BaseProtocol
29
+ from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
30
+ from .helpers import (
31
+ _EXC_SENTINEL,
32
+ DEBUG,
33
+ NO_EXTENSIONS,
34
+ BaseTimerContext,
35
+ method_must_be_empty_body,
36
+ set_exception,
37
+ status_code_must_be_empty_body,
38
+ )
39
+ from .http_exceptions import (
40
+ BadHttpMessage,
41
+ BadStatusLine,
42
+ ContentEncodingError,
43
+ ContentLengthError,
44
+ InvalidHeader,
45
+ InvalidURLError,
46
+ LineTooLong,
47
+ TransferEncodingError,
48
+ )
49
+ from .http_writer import HttpVersion, HttpVersion10
50
+ from .log import internal_logger
51
+ from .streams import EMPTY_PAYLOAD, StreamReader
52
+ from .typedefs import RawHeaders
53
+
54
+ __all__ = (
55
+ "HeadersParser",
56
+ "HttpParser",
57
+ "HttpRequestParser",
58
+ "HttpResponseParser",
59
+ "RawRequestMessage",
60
+ "RawResponseMessage",
61
+ )
62
+
63
# Line separator accepted by feed_data: strict CRLF or lax bare LF.
_SEP = Literal[b"\r\n", b"\n"]

ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
# HTTP-version, single digits only (e.g. HTTP/1.1).
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
# Unsigned decimal integers (Content-Length) / hex digits (chunk sizes).
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
79
+
80
+
81
class RawRequestMessage(NamedTuple):
    """Parsed HTTP request start line and header section (no body)."""

    method: str
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
    url: URL
92
+
93
+
94
class RawResponseMessage(NamedTuple):
    """Parsed HTTP response status line and header section (no body)."""

    version: HttpVersion
    code: int
    reason: str
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
104
+
105
+
106
# Message type produced by a concrete parser: request or response.
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
107
+
108
+
109
class ParseState(IntEnum):
    # How the payload boundary is determined for the current message:
    # none, fixed Content-Length, chunked transfer coding, or until EOF.

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3
115
+
116
+
117
class ChunkState(IntEnum):
    # States of the chunked transfer-coding decoder.
    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4
123
+
124
+
125
class HeadersParser:
    """Parse a list of raw header lines into a CIMultiDict plus raw pairs."""

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        # lax enables obsolete line folding (continuation lines).
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse *lines* (start line at index 0 is skipped) into headers.

        Raises InvalidHeader for malformed names/values and LineTooLong
        when a name or value exceeds max_field_size.
        """
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            # Header names must be RFC 9110 tokens.
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
231
+
232
+
233
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Check if the upgrade header is supported."""
    upgrade = headers.get(hdrs.UPGRADE, "").lower()
    return upgrade in ("tcp", "websocket")
236
+
237
+
238
+ class HttpParser(abc.ABC, Generic[_MsgT]):
239
+ lax: ClassVar[bool] = False
240
+
241
    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        """Store parser configuration and reset incremental state.

        limit: buffer limit passed to StreamReader payloads created by
            this parser.
        max_line_size / max_headers / max_field_size: header-section
            limits, forwarded to the internal HeadersParser.
        """
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Incremental state: buffered header lines, unconsumed trailing
        # bytes, whether the connection upgraded, and the active payload
        # parser (None while the header section is being read).
        self._lines: List[bytes] = []
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )
281
+
282
    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT:
        """Parse start line + header lines into a raw message (subclass hook)."""
        pass
285
+
286
+ def feed_eof(self) -> Optional[_MsgT]:
287
+ if self._payload_parser is not None:
288
+ self._payload_parser.feed_eof()
289
+ self._payload_parser = None
290
+ else:
291
+ # try to extract partial message
292
+ if self._tail:
293
+ self._lines.append(self._tail)
294
+
295
+ if self._lines:
296
+ if self._lines[-1] != "\r\n":
297
+ self._lines.append(b"")
298
+ with suppress(Exception):
299
+ return self.parse_message(self._lines)
300
+ return None
301
+
302
+ def feed_data(
303
+ self,
304
+ data: bytes,
305
+ SEP: _SEP = b"\r\n",
306
+ EMPTY: bytes = b"",
307
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
308
+ METH_CONNECT: str = hdrs.METH_CONNECT,
309
+ SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
310
+ ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
311
+
312
+ messages = []
313
+
314
+ if self._tail:
315
+ data, self._tail = self._tail + data, b""
316
+
317
+ data_len = len(data)
318
+ start_pos = 0
319
+ loop = self.loop
320
+
321
+ while start_pos < data_len:
322
+
323
+ # read HTTP message (request/response line + headers), \r\n\r\n
324
+ # and split by lines
325
+ if self._payload_parser is None and not self._upgraded:
326
+ pos = data.find(SEP, start_pos)
327
+ # consume \r\n
328
+ if pos == start_pos and not self._lines:
329
+ start_pos = pos + len(SEP)
330
+ continue
331
+
332
+ if pos >= start_pos:
333
+ # line found
334
+ line = data[start_pos:pos]
335
+ if SEP == b"\n": # For lax response parsing
336
+ line = line.rstrip(b"\r")
337
+ self._lines.append(line)
338
+ start_pos = pos + len(SEP)
339
+
340
+ # \r\n\r\n found
341
+ if self._lines[-1] == EMPTY:
342
+ try:
343
+ msg: _MsgT = self.parse_message(self._lines)
344
+ finally:
345
+ self._lines.clear()
346
+
347
+ def get_content_length() -> Optional[int]:
348
+ # payload length
349
+ length_hdr = msg.headers.get(CONTENT_LENGTH)
350
+ if length_hdr is None:
351
+ return None
352
+
353
+ # Shouldn't allow +/- or other number formats.
354
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
355
+ # msg.headers is already stripped of leading/trailing wsp
356
+ if not DIGITS.fullmatch(length_hdr):
357
+ raise InvalidHeader(CONTENT_LENGTH)
358
+
359
+ return int(length_hdr)
360
+
361
+ length = get_content_length()
362
+ # do not support old websocket spec
363
+ if SEC_WEBSOCKET_KEY1 in msg.headers:
364
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
365
+
366
+ self._upgraded = msg.upgrade and _is_supported_upgrade(
367
+ msg.headers
368
+ )
369
+
370
+ method = getattr(msg, "method", self.method)
371
+ # code is only present on responses
372
+ code = getattr(msg, "code", 0)
373
+
374
+ assert self.protocol is not None
375
+ # calculate payload
376
+ empty_body = status_code_must_be_empty_body(code) or bool(
377
+ method and method_must_be_empty_body(method)
378
+ )
379
+ if not empty_body and (
380
+ ((length is not None and length > 0) or msg.chunked)
381
+ and not self._upgraded
382
+ ):
383
+ payload = StreamReader(
384
+ self.protocol,
385
+ timer=self.timer,
386
+ loop=loop,
387
+ limit=self._limit,
388
+ )
389
+ payload_parser = HttpPayloadParser(
390
+ payload,
391
+ length=length,
392
+ chunked=msg.chunked,
393
+ method=method,
394
+ compression=msg.compression,
395
+ code=self.code,
396
+ readall=self.readall,
397
+ response_with_body=self.response_with_body,
398
+ auto_decompress=self._auto_decompress,
399
+ lax=self.lax,
400
+ )
401
+ if not payload_parser.done:
402
+ self._payload_parser = payload_parser
403
+ elif method == METH_CONNECT:
404
+ assert isinstance(msg, RawRequestMessage)
405
+ payload = StreamReader(
406
+ self.protocol,
407
+ timer=self.timer,
408
+ loop=loop,
409
+ limit=self._limit,
410
+ )
411
+ self._upgraded = True
412
+ self._payload_parser = HttpPayloadParser(
413
+ payload,
414
+ method=msg.method,
415
+ compression=msg.compression,
416
+ readall=True,
417
+ auto_decompress=self._auto_decompress,
418
+ lax=self.lax,
419
+ )
420
+ elif not empty_body and length is None and self.read_until_eof:
421
+ payload = StreamReader(
422
+ self.protocol,
423
+ timer=self.timer,
424
+ loop=loop,
425
+ limit=self._limit,
426
+ )
427
+ payload_parser = HttpPayloadParser(
428
+ payload,
429
+ length=length,
430
+ chunked=msg.chunked,
431
+ method=method,
432
+ compression=msg.compression,
433
+ code=self.code,
434
+ readall=True,
435
+ response_with_body=self.response_with_body,
436
+ auto_decompress=self._auto_decompress,
437
+ lax=self.lax,
438
+ )
439
+ if not payload_parser.done:
440
+ self._payload_parser = payload_parser
441
+ else:
442
+ payload = EMPTY_PAYLOAD
443
+
444
+ messages.append((msg, payload))
445
+ else:
446
+ self._tail = data[start_pos:]
447
+ data = EMPTY
448
+ break
449
+
450
+ # no parser, just store
451
+ elif self._payload_parser is None and self._upgraded:
452
+ assert not self._lines
453
+ break
454
+
455
+ # feed payload
456
+ elif data and start_pos < data_len:
457
+ assert not self._lines
458
+ assert self._payload_parser is not None
459
+ try:
460
+ eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
461
+ except BaseException as underlying_exc:
462
+ reraised_exc = underlying_exc
463
+ if self.payload_exception is not None:
464
+ reraised_exc = self.payload_exception(str(underlying_exc))
465
+
466
+ set_exception(
467
+ self._payload_parser.payload,
468
+ reraised_exc,
469
+ underlying_exc,
470
+ )
471
+
472
+ eof = True
473
+ data = b""
474
+
475
+ if eof:
476
+ start_pos = 0
477
+ data_len = len(data)
478
+ self._payload_parser = None
479
+ continue
480
+ else:
481
+ break
482
+
483
+ if data and start_pos < data_len:
484
+ data = data[start_pos:]
485
+ else:
486
+ data = EMPTY
487
+
488
+ return messages, self._upgraded, data
489
+
490
    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.

        Returns a 6-tuple:
        ``(headers, raw_headers, close_conn, encoding, upgrade, chunked)``
        where ``close_conn`` is ``None`` when the peer did not express a
        keep-alive preference.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None  # None == "Connection:" not specified by the peer
        encoding = None
        upgrade = False
        chunked = False

        # Reject messages that repeat a header allowed at most once.
        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding: only the compression schemes we can decode are reported
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking: any Transfer-Encoding other than a single "chunked"
        # token is rejected outright
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if "chunked" == te.lower():
                chunked = True
            else:
                raise BadHttpMessage("Request has invalid `Transfer-Encoding`")

            # Mixing both framing mechanisms enables request smuggling;
            # refuse the message entirely.
            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
557
+
558
+ def set_upgraded(self, val: bool) -> None:
559
+ """Set connection upgraded (to websocket) mode.
560
+
561
+ :param bool val: new state.
562
+ """
563
+ self._upgraded = val
564
+
565
+
566
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line, e.g. ``GET /path HTTP/1.1``
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method must be an RFC 9110 token
        if not TOKENRE.fullmatch(method):
            raise BadStatusLine(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP/1.0 closes by default
                close = True
            else:  # HTTP/1.1 connections are persistent by default
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )
658
+
659
+
660
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        # Outside of DEBUG the parser tolerates a bare LF separator
        # (lax mode); in DEBUG only strict CRLF is accepted.
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        # status line, e.g. ``HTTP/1.1 200 OK``
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            # reason-phrase is optional
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            # The server expressed no preference; apply the RFC defaults.
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )
743
+
744
+
745
class HttpPayloadParser:
    """Incremental parser for a single HTTP message body.

    Depending on the headers of the message it operates in one of three
    modes: fixed ``Content-Length``, ``chunked`` transfer-encoding, or
    read-until-EOF.  Parsed body bytes are fed into ``payload`` (wrapped
    in a :class:`DeflateBuffer` when the body is compressed and
    ``auto_decompress`` is enabled).
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        self._length = 0  # remaining Content-Length bytes (PARSE_LENGTH mode)
        self._type = ParseState.PARSE_NONE
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0  # remaining bytes of the current chunk
        self._chunk_tail = b""  # partial line carried over between feeds
        self._auto_decompress = auto_decompress
        self._lax = lax
        self.done = False  # True once the whole body has been consumed

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True

        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True
        else:
            # No explicit framing: either read to EOF or assume no body.
            if readall and code != 204:
                self._type = ParseState.PARSE_UNTIL_EOF
            elif method in ("PUT", "POST"):
                internal_logger.warning(  # pragma: no cover
                    "Content-Length or Transfer-Encoding header is required"
                )
                self._type = ParseState.PARSE_NONE
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Signal end of connection; validate the body was complete.

        :raises ContentLengthError: fewer bytes arrived than Content-Length
            promised.
        :raises TransferEncodingError: the chunked terminator never arrived.
        """
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed raw socket data into the body parser.

        Returns ``(eof, unparsed_tail)`` where ``eof`` is True once the
        body is complete and ``unparsed_tail`` holds any bytes belonging
        to the next message on the connection.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                # chunk spills into the next message; return the excess
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # prepend the partial line left over from the last feed
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        # size must be pure hex digits (no sign/prefix)
                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # size line not complete yet; wait for more data
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        if self._lax and chunk.startswith(b"\r"):
                            chunk = chunk[1:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
942
+
943
+
944
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0  # total compressed bytes received so far
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            # "gzip"/"deflate" (or None) are handled by zlib
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Propagate the error to the wrapped output stream.
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress ``chunk`` and forward the result downstream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and validate the compressed stream ended."""
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                # stream was truncated mid-deflate
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        # Chunk framing is delegated untouched; only the data is transformed.
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
1020
+
1021
+
1022
# Pure-Python implementations are always available under the *Py aliases.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the faster Cython implementations when extensions are
        # enabled; they shadow the pure-Python names defined above.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    # Extension module not built; keep the pure-Python fallbacks.
    pass
env-llmeval/lib/python3.10/site-packages/aiohttp/http_websocket.py ADDED
@@ -0,0 +1,740 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import functools
5
+ import json
6
+ import random
7
+ import re
8
+ import sys
9
+ import zlib
10
+ from enum import IntEnum
11
+ from struct import Struct
12
+ from typing import (
13
+ Any,
14
+ Callable,
15
+ Final,
16
+ List,
17
+ NamedTuple,
18
+ Optional,
19
+ Pattern,
20
+ Set,
21
+ Tuple,
22
+ Union,
23
+ cast,
24
+ )
25
+
26
+ from .base_protocol import BaseProtocol
27
+ from .compression_utils import ZLibCompressor, ZLibDecompressor
28
+ from .helpers import NO_EXTENSIONS, set_exception
29
+ from .streams import DataQueue
30
+
31
+ __all__ = (
32
+ "WS_CLOSED_MESSAGE",
33
+ "WS_CLOSING_MESSAGE",
34
+ "WS_KEY",
35
+ "WebSocketReader",
36
+ "WebSocketWriter",
37
+ "WSMessage",
38
+ "WebSocketError",
39
+ "WSMsgType",
40
+ "WSCloseCode",
41
+ )
42
+
43
+
44
+ class WSCloseCode(IntEnum):
45
+ OK = 1000
46
+ GOING_AWAY = 1001
47
+ PROTOCOL_ERROR = 1002
48
+ UNSUPPORTED_DATA = 1003
49
+ ABNORMAL_CLOSURE = 1006
50
+ INVALID_TEXT = 1007
51
+ POLICY_VIOLATION = 1008
52
+ MESSAGE_TOO_BIG = 1009
53
+ MANDATORY_EXTENSION = 1010
54
+ INTERNAL_ERROR = 1011
55
+ SERVICE_RESTART = 1012
56
+ TRY_AGAIN_LATER = 1013
57
+ BAD_GATEWAY = 1014
58
+
59
+
60
+ ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
61
+
62
+ # For websockets, keeping latency low is extremely important as implementations
63
+ # generally expect to be able to send and receive messages quickly. We use a
64
+ # larger chunk size than the default to reduce the number of executor calls
65
+ # since the executor is a significant source of latency and overhead when
66
+ # the chunks are small. A size of 5KiB was chosen because it is also the
67
+ # same value python-zlib-ng choose to use as the threshold to release the GIL.
68
+
69
+ WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
70
+
71
+
72
+ class WSMsgType(IntEnum):
73
+ # websocket spec types
74
+ CONTINUATION = 0x0
75
+ TEXT = 0x1
76
+ BINARY = 0x2
77
+ PING = 0x9
78
+ PONG = 0xA
79
+ CLOSE = 0x8
80
+
81
+ # aiohttp specific types
82
+ CLOSING = 0x100
83
+ CLOSED = 0x101
84
+ ERROR = 0x102
85
+
86
+ text = TEXT
87
+ binary = BINARY
88
+ ping = PING
89
+ pong = PONG
90
+ close = CLOSE
91
+ closing = CLOSING
92
+ closed = CLOSED
93
+ error = ERROR
94
+
95
+
96
+ WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
97
+
98
+
99
+ UNPACK_LEN2 = Struct("!H").unpack_from
100
+ UNPACK_LEN3 = Struct("!Q").unpack_from
101
+ UNPACK_CLOSE_CODE = Struct("!H").unpack
102
+ PACK_LEN1 = Struct("!BB").pack
103
+ PACK_LEN2 = Struct("!BBH").pack
104
+ PACK_LEN3 = Struct("!BBQ").pack
105
+ PACK_CLOSE_CODE = Struct("!H").pack
106
+ MSG_SIZE: Final[int] = 2**14
107
+ DEFAULT_LIMIT: Final[int] = 2**16
108
+
109
+
110
+ class WSMessage(NamedTuple):
111
+ type: WSMsgType
112
+ # To type correctly, this would need some kind of tagged union for each type.
113
+ data: Any
114
+ extra: Optional[str]
115
+
116
+ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
117
+ """Return parsed JSON data.
118
+
119
+ .. versionadded:: 0.22
120
+ """
121
+ return loads(self.data)
122
+
123
+
124
+ WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
125
+ WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
126
+
127
+
128
+ class WebSocketError(Exception):
129
+ """WebSocket protocol parser error."""
130
+
131
+ def __init__(self, code: int, message: str) -> None:
132
+ self.code = code
133
+ super().__init__(code, message)
134
+
135
+ def __str__(self) -> str:
136
+ return cast(str, self.args[1])
137
+
138
+
139
+ class WSHandshakeError(Exception):
140
+ """WebSocket protocol handshake error."""
141
+
142
+
143
+ native_byteorder: Final[str] = sys.byteorder
144
+
145
+
146
+ # Used by _websocket_mask_python
147
+ @functools.lru_cache
148
+ def _xor_table() -> List[bytes]:
149
+ return [bytes(a ^ b for a in range(256)) for b in range(256)]
150
+
151
+
152
+ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
153
+ """Websocket masking function.
154
+
155
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
156
+ object of any length. The contents of `data` are masked with `mask`,
157
+ as specified in section 5.3 of RFC 6455.
158
+
159
+ Note that this function mutates the `data` argument.
160
+
161
+ This pure-python implementation may be replaced by an optimized
162
+ version when available.
163
+
164
+ """
165
+ assert isinstance(data, bytearray), data
166
+ assert len(mask) == 4, mask
167
+
168
+ if data:
169
+ _XOR_TABLE = _xor_table()
170
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
171
+ data[::4] = data[::4].translate(a)
172
+ data[1::4] = data[1::4].translate(b)
173
+ data[2::4] = data[2::4].translate(c)
174
+ data[3::4] = data[3::4].translate(d)
175
+
176
+
177
+ if NO_EXTENSIONS: # pragma: no cover
178
+ _websocket_mask = _websocket_mask_python
179
+ else:
180
+ try:
181
+ from ._websocket import _websocket_mask_cython # type: ignore[import-not-found]
182
+
183
+ _websocket_mask = _websocket_mask_cython
184
+ except ImportError: # pragma: no cover
185
+ _websocket_mask = _websocket_mask_python
186
+
187
+ _WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
188
+
189
+
190
+ _WS_EXT_RE: Final[Pattern[str]] = re.compile(
191
+ r"^(?:;\s*(?:"
192
+ r"(server_no_context_takeover)|"
193
+ r"(client_no_context_takeover)|"
194
+ r"(server_max_window_bits(?:=(\d+))?)|"
195
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
196
+ )
197
+
198
+ _WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
199
+
200
+
201
+ def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
202
+ if not extstr:
203
+ return 0, False
204
+
205
+ compress = 0
206
+ notakeover = False
207
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
208
+ defext = ext.group(1)
209
+ # Return compress = 15 when get `permessage-deflate`
210
+ if not defext:
211
+ compress = 15
212
+ break
213
+ match = _WS_EXT_RE.match(defext)
214
+ if match:
215
+ compress = 15
216
+ if isserver:
217
+ # Server never fail to detect compress handshake.
218
+ # Server does not need to send max wbit to client
219
+ if match.group(4):
220
+ compress = int(match.group(4))
221
+ # Group3 must match if group4 matches
222
+ # Compress wbit 8 does not support in zlib
223
+ # If compress level not support,
224
+ # CONTINUE to next extension
225
+ if compress > 15 or compress < 9:
226
+ compress = 0
227
+ continue
228
+ if match.group(1):
229
+ notakeover = True
230
+ # Ignore regex group 5 & 6 for client_max_window_bits
231
+ break
232
+ else:
233
+ if match.group(6):
234
+ compress = int(match.group(6))
235
+ # Group5 must match if group6 matches
236
+ # Compress wbit 8 does not support in zlib
237
+ # If compress level not support,
238
+ # FAIL the parse progress
239
+ if compress > 15 or compress < 9:
240
+ raise WSHandshakeError("Invalid window size")
241
+ if match.group(2):
242
+ notakeover = True
243
+ # Ignore regex group 5 & 6 for client_max_window_bits
244
+ break
245
+ # Return Fail if client side and not match
246
+ elif not isserver:
247
+ raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
248
+
249
+ return compress, notakeover
250
+
251
+
252
+ def ws_ext_gen(
253
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
254
+ ) -> str:
255
+ # client_notakeover=False not used for server
256
+ # compress wbit 8 does not support in zlib
257
+ if compress < 9 or compress > 15:
258
+ raise ValueError(
259
+ "Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
260
+ )
261
+ enabledext = ["permessage-deflate"]
262
+ if not isserver:
263
+ enabledext.append("client_max_window_bits")
264
+
265
+ if compress < 15:
266
+ enabledext.append("server_max_window_bits=" + str(compress))
267
+ if server_notakeover:
268
+ enabledext.append("server_no_context_takeover")
269
+ # if client_notakeover:
270
+ # enabledext.append('client_no_context_takeover')
271
+ return "; ".join(enabledext)
272
+
273
+
274
+ class WSParserState(IntEnum):
275
+ READ_HEADER = 1
276
+ READ_PAYLOAD_LENGTH = 2
277
+ READ_PAYLOAD_MASK = 3
278
+ READ_PAYLOAD = 4
279
+
280
+
281
+ class WebSocketReader:
282
+ def __init__(
283
+ self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
284
+ ) -> None:
285
+ self.queue = queue
286
+ self._max_msg_size = max_msg_size
287
+
288
+ self._exc: Optional[BaseException] = None
289
+ self._partial = bytearray()
290
+ self._state = WSParserState.READ_HEADER
291
+
292
+ self._opcode: Optional[int] = None
293
+ self._frame_fin = False
294
+ self._frame_opcode: Optional[int] = None
295
+ self._frame_payload = bytearray()
296
+
297
+ self._tail = b""
298
+ self._has_mask = False
299
+ self._frame_mask: Optional[bytes] = None
300
+ self._payload_length = 0
301
+ self._payload_length_flag = 0
302
+ self._compressed: Optional[bool] = None
303
+ self._decompressobj: Optional[ZLibDecompressor] = None
304
+ self._compress = compress
305
+
306
+ def feed_eof(self) -> None:
307
+ self.queue.feed_eof()
308
+
309
+ def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
310
+ if self._exc:
311
+ return True, data
312
+
313
+ try:
314
+ return self._feed_data(data)
315
+ except Exception as exc:
316
+ self._exc = exc
317
+ set_exception(self.queue, exc)
318
+ return True, b""
319
+
320
    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Parse *data* into frames and emit WSMessage objects to the queue.

        CLOSE/PING/PONG frames are emitted immediately.  TEXT/BINARY payloads
        are accumulated in ``self._partial`` across continuation frames and
        emitted only when the final (fin) frame arrives; compressed messages
        are inflated at that point.  Raises WebSocketError on protocol
        violations.  Always returns ``(False, b"")``.
        """
        for fin, opcode, payload, compressed in self.parse_frame(data):
            # Lazily create the shared decompressor on the first compressed frame.
            if compressed and not self._decompressobj:
                self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    # First two bytes of a non-empty close payload carry the code.
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is malformed.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload: remember the opcode of the
                    # first fragment and buffer until the fin frame arrives.
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # previous frame was non finished
                    # we should get continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    # Resolve the effective opcode for a continuation frame.
                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompression must be done only after all fragments of
                    # the message have been received.
                    if compressed:
                        assert self._decompressobj is not None
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress_sync(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            # Inflated output exceeded the size limit.
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""
443
+
444
    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket.

        Incremental RFC 6455 frame parser: consumes as many complete frames
        from ``buf`` (plus any previously buffered tail) as possible and
        returns them as ``(fin, opcode, payload, compressed)`` tuples.
        Partial state (header fields, mask, remaining payload) is kept on
        ``self`` between calls; unconsumed bytes go to ``self._tail``.
        """
        frames = []
        if self._tail:
            # Prepend leftover bytes from the previous feed.
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    # / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        # Control frames (opcode > 0x7) may not be fragmented.
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be " "larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    # 126 flags a 16-bit extended length field.
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    # 127 flags a 64-bit extended length field.
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Not enough bytes yet: take everything and wait for more.
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    # Frame complete: unmask (if masked) and hand it out.
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        # Save unconsumed bytes for the next feed.
        self._tail = buf[start_pos:]

        return frames
598
+
599
+
600
class WebSocketWriter:
    """Serialize and send WebSocket frames (RFC 6455) over a transport.

    Supports optional client-side masking and permessage-deflate
    compression (per-connection via *compress* or per-message via the
    ``compress`` argument of :meth:`send`).
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: random.Random = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        # NOTE(review): the default Random() instance is shared across all
        # writers constructed without an explicit *random* — confirm intended.
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit  # buffered bytes before forcing a drain
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # NOTE(review): bitwise test against WSMsgType.CLOSE — matches any
        # opcode sharing the 0x8 bit; presumably meant to let CLOSE through.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = self._make_compress_obj(compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = self._make_compress_obj(self.compress)
                compressobj = self._compressobj

            message = await compressobj.compress(message)
            # Its critical that we do not return control to the event
            # loop until we have finished sending all the compressed
            # data. Otherwise we could end up mixing compressed frames
            # if there are multiple coroutines compressing data.
            message += compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            if message.endswith(_WS_DEFLATE_TRAILING):
                # Strip the 4-byte deflate flush trailer per permessage-deflate.
                message = message[:-4]
            # RSV1 bit advertises a compressed message.
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Choose the header form by payload size: 7-bit, 16-bit or 64-bit length.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            # Apply a fresh 4-byte XOR mask (client-to-server frames).
            mask_int = self.randrange(0, 0xFFFFFFFF)
            mask = mask_int.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + msg_length
        else:
            if msg_length > MSG_SIZE:
                # Avoid one large concatenation for big payloads.
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + msg_length

        # It is safe to return control to the event loop when using compression
        # after this point as we have already sent or buffered all the data.

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _make_compress_obj(self, compress: int) -> ZLibCompressor:
        """Build a raw-deflate compressor with the given window size."""
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,
            wbits=-compress,
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )

    def _write(self, data: bytes) -> None:
        # Raises instead of silently dropping data on a dead transport.
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: Union[bytes, str] = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: Union[bytes, str] = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if the frame could not be written.
            self._closing = True
env-llmeval/lib/python3.10/site-packages/aiohttp/http_writer.py ADDED
@@ -0,0 +1,198 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Http related parsers and protocol."""
2
+
3
+ import asyncio
4
+ import zlib
5
+ from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
6
+
7
+ from multidict import CIMultiDict
8
+
9
+ from .abc import AbstractStreamWriter
10
+ from .base_protocol import BaseProtocol
11
+ from .compression_utils import ZLibCompressor
12
+ from .helpers import NO_EXTENSIONS
13
+
14
+ __all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
15
+
16
+
17
class HttpVersion(NamedTuple):
    """An HTTP protocol version as an ordered ``(major, minor)`` pair."""

    major: int
    minor: int


# Pre-built singletons for the two HTTP/1.x versions used throughout aiohttp.
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
24
+
25
+
26
# Optional callback awaited with each body chunk just before it is written.
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
# Optional callback awaited with the headers just before they are serialized.
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
28
+
29
+
30
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP/1.x message body to the protocol's transport.

    Supports optional chunked transfer encoding, optional zlib/deflate
    compression, and Content-Length-bounded output.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol

        self.loop = loop
        self.length = None  # remaining Content-Length budget; None = unbounded
        self.chunked = False
        self.buffer_size = 0  # bytes written since the last drain
        self.output_size = 0  # total bytes written

        self._eof = False
        self._compress: Optional[ZLibCompressor] = None
        self._drain_waiter = None

        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """The underlying transport, or ``None`` when disconnected."""
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the writer to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with *encoding* before writing."""
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        # Raw transport write with bookkeeping; raises on a dead connection.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self.transport
        if not self._protocol.connected or transport is None or transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            # Truncate to the remaining Content-Length budget.
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Frame as a chunked-encoding chunk: "<hex-size>\r\n<data>\r\n".
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Flush compression state, write the final chunk/terminator, drain."""
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = await self._compress.compress(chunk)

            # Flush whatever the compressor still buffers.
            chunk += self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    # Final data chunk followed by the zero-length terminator.
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
172
+
173
+
174
+ def _safe_header(string: str) -> str:
175
+ if "\r" in string or "\n" in string:
176
+ raise ValueError(
177
+ "Newline or carriage return detected in headers. "
178
+ "Potential header injection attack."
179
+ )
180
+ return string
181
+
182
+
183
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    """Serialize the status line and headers into one HTTP/1.x header block.

    Every header name and value passes through ``_safe_header`` so CR/LF
    injection raises instead of corrupting the wire format.
    """
    joined = "\r\n".join(
        _safe_header(name) + ": " + _safe_header(value)
        for name, value in headers.items()
    )
    return (status_line + "\r\n" + joined + "\r\n\r\n").encode("utf-8")
187
+
188
+
189
# Default to the pure-Python serializer; swapped below when the C extension
# is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # Extension not built/installed — keep the pure-Python implementation.
    pass
env-llmeval/lib/python3.10/site-packages/aiohttp/locks.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import collections
3
+ from typing import Any, Deque, Optional
4
+
5
+
6
class EventResultOrError:
    """Event asyncio lock helper class.

    Wraps an :class:`asyncio.Event` so that all blocked tasks can be woken
    either normally or with an exception raised in each of them.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()
        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

    def set(self, exc: Optional[BaseException] = None) -> None:
        """Wake every waiter; they re-raise *exc* when it is not ``None``."""
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        """Block until :meth:`set` is called, then return or raise."""
        task = self._loop.create_task(self._event.wait())
        self._waiters.append(task)
        try:
            result = await task
        finally:
            # Always unregister, even on cancellation.
            self._waiters.remove(task)

        pending_exc = self._exc
        if pending_exc is not None:
            raise pending_exc
        return result

    def cancel(self) -> None:
        """Cancel all waiters"""
        for pending in self._waiters:
            pending.cancel()
env-llmeval/lib/python3.10/site-packages/aiohttp/log.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
import logging

# One logger per aiohttp subsystem; the dotted names let applications
# configure them individually or all at once via the "aiohttp" parent.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
env-llmeval/lib/python3.10/site-packages/aiohttp/multipart.py ADDED
@@ -0,0 +1,1008 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import base64
2
+ import binascii
3
+ import json
4
+ import re
5
+ import uuid
6
+ import warnings
7
+ import zlib
8
+ from collections import deque
9
+ from types import TracebackType
10
+ from typing import (
11
+ TYPE_CHECKING,
12
+ Any,
13
+ AsyncIterator,
14
+ Deque,
15
+ Dict,
16
+ Iterator,
17
+ List,
18
+ Mapping,
19
+ Optional,
20
+ Sequence,
21
+ Tuple,
22
+ Type,
23
+ Union,
24
+ cast,
25
+ )
26
+ from urllib.parse import parse_qsl, unquote, urlencode
27
+
28
+ from multidict import CIMultiDict, CIMultiDictProxy
29
+
30
+ from .compression_utils import ZLibCompressor, ZLibDecompressor
31
+ from .hdrs import (
32
+ CONTENT_DISPOSITION,
33
+ CONTENT_ENCODING,
34
+ CONTENT_LENGTH,
35
+ CONTENT_TRANSFER_ENCODING,
36
+ CONTENT_TYPE,
37
+ )
38
+ from .helpers import CHAR, TOKEN, parse_mimetype, reify
39
+ from .http import HeadersParser
40
+ from .payload import (
41
+ JsonPayload,
42
+ LookupError,
43
+ Order,
44
+ Payload,
45
+ StringPayload,
46
+ get_payload,
47
+ payload_type,
48
+ )
49
+ from .streams import StreamReader
50
+
51
# Public multipart API exported by this module.
__all__ = (
    "MultipartReader",
    "MultipartWriter",
    "BodyPartReader",
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "parse_content_disposition",
    "content_disposition_filename",
)


if TYPE_CHECKING:
    # Imported for type annotations only; avoids a runtime import cycle.
    from .client_reqrep import ClientResponse
64
+
65
+
66
class BadContentDispositionHeader(RuntimeWarning):
    """Warned when an entire Content-Disposition header is unparseable."""

    pass
68
+
69
+
70
class BadContentDispositionParam(RuntimeWarning):
    """Warned when a single Content-Disposition parameter is malformed."""

    pass
72
+
73
+
74
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into ``(disposition_type, params)``.

    Returns ``(None, {})`` and emits a BadContentDispositionHeader /
    BadContentDispositionParam warning for malformed input instead of
    raising.  Handles quoted values, RFC 5987 extended parameters
    (``key*=charset''value``) and numbered continuations (``key*0``, ...).
    """
    def is_token(string: str) -> bool:
        # True when every character is a valid HTTP token character.
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        # e.g. "filename*0" or "filename*1*" — a numbered continuation.
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        # Drop backslash escapes inside quoted strings.
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        if key in params:
            # Duplicate parameter names invalidate the whole header.
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                # charset'language'percent-encoded-value
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
171
+
172
+
173
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Extract the *name* value from parsed Content-Disposition parameters.

    Checks, in order: the extended form (``name*``), the plain form
    (``name``), and RFC 2231 numbered continuations (``name*0``,
    ``name*1*`` ...), percent-decoding the joined result when a charset
    prefix (``charset''value``) is present.  Returns ``None`` when absent.
    """
    ext_key = "%s*" % name
    if not params:
        return None
    if ext_key in params:
        return params[ext_key]
    if name in params:
        return params[name]

    # Collect in-order continuation segments: name*0, name*1, ...
    segments = []
    candidates = sorted(
        item for item in params.items() if item[0].startswith(ext_key)
    )
    for idx, (key, segment) in enumerate(candidates):
        _, tail = key.split("*", 1)
        if tail.endswith("*"):
            tail = tail[:-1]
        if tail != str(idx):
            # Gap or out-of-sequence index: stop joining.
            break
        segments.append(segment)

    if not segments:
        return None
    joined = "".join(segments)
    if "'" not in joined:
        return joined
    # charset'language'encoded-value
    encoding, _, joined = joined.split("'", 2)
    return unquote(joined, encoding or "utf-8", "strict")
204
+
205
+
206
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    Takes care of the underlying connection, releasing it once the
    multipart stream has been fully consumed.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        upcoming = await self.next()
        if upcoming is None:
            raise StopAsyncIteration
        return upcoming

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emit the next multipart reader object, releasing at end of stream."""
        item = await self.stream.next()
        if self.stream.at_eof():
            # Stream exhausted: give the connection back right away.
            await self.release()
        return item

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
252
+
253
+ class BodyPartReader:
254
+ """Multipart reader for single body part."""
255
+
256
    # Default size of a single chunk returned by read()/read_chunk().
    chunk_size = 8192

    def __init__(
        self,
        boundary: bytes,
        headers: "CIMultiDictProxy[str]",
        content: StreamReader,
        *,
        subtype: str = "mixed",
        default_charset: Optional[str] = None,
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._content = content
        self._default_charset = default_charset
        self._at_eof = False
        self._is_form_data = subtype == "form-data"
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
        # Content-Length of an individual form-data part is ignored.
        length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        self._unread: Deque[bytes] = deque()  # lines pushed back by readline()
        self._prev_chunk: Optional[bytes] = None
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}
281
+
282
    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        """Iterate the part's payload chunk by chunk (yields bytes)."""
        return self  # type: ignore[return-value]

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            # Part exhausted: end the async-for loop.
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        """Read the remaining payload; ``None`` when nothing is left."""
        item = await self.read()
        if not item:
            return None
        return item
296
+
297
    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following by encoding
        method from Content-Encoding header. If it missed
        data remains untouched
        """
        if self._at_eof:
            return b""
        data = bytearray()
        # Accumulate the whole part in chunk_size pieces.
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data
312
+
313
    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        # With a known Content-Length we can read by byte budget; otherwise
        # we must scan for the boundary in the stream.
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            # Consume the CRLF that separates the payload from the boundary.
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk
334
+
335
    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads body part content chunk of the specified size.
        # The body part must has Content-Length header with proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        # Never read past the declared part length.
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        if self._content.at_eof():
            self._at_eof = True
        return chunk
344
+
345
    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads content chunk of body part with unknown length.
        # The Content-Length header for body part is not necessary.
        #
        # Keeps a two-chunk sliding window so a boundary split across a
        # chunk edge is still found; returns the previous chunk once the
        # current one confirms it contains no boundary prefix.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater or equal than boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            # Only the tail of prev_chunk can start a boundary we missed.
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result
378
+
379
async def readline(self) -> bytes:
    """Reads body part by line by line."""
    if self._at_eof:
        return b""

    # A previously pushed-back line (from boundary lookahead) wins.
    if self._unread:
        line = self._unread.popleft()
    else:
        line = await self._content.readline()

    if line.startswith(self._boundary):
        # the very last boundary may not come with \r\n,
        # so set single rules for everyone
        sline = line.rstrip(b"\r\n")
        boundary = self._boundary
        last_boundary = self._boundary + b"--"
        # ensure that we read exactly the boundary, not something alike
        if sline == boundary or sline == last_boundary:
            self._at_eof = True
            # Keep the boundary line for the enclosing reader to consume.
            self._unread.append(line)
            return b""
    else:
        # Peek one line ahead: the CRLF before a boundary belongs to the
        # boundary, not to the part's payload, so strip it exactly once.
        next_line = await self._content.readline()
        if next_line.startswith(self._boundary):
            line = line[:-2]  # strip CRLF but only once
        self._unread.append(next_line)

    return line
407
+
408
async def release(self) -> None:
    """Like read(), but discards all remaining data of the part."""
    # The loop condition already covers the already-at-EOF case.
    while not self._at_eof:
        await self.read_chunk(self.chunk_size)
414
+
415
async def text(self, *, encoding: Optional[str] = None) -> str:
    """Like read(), but decodes the body part payload as text."""
    payload = await self.read(decode=True)
    # Charset resolution follows the HTML5 multipart/form-data encoding
    # algorithm; an explicit argument always wins.
    charset = encoding or self.get_charset(default="utf-8")
    return payload.decode(charset)
422
+
423
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
    """Like read(), but parses the body part payload as JSON."""
    import json  # stdlib; bound locally so the method name cannot shadow it

    raw = await self.read(decode=True)
    if not raw:
        return None
    charset = encoding or self.get_charset(default="utf-8")
    return cast(Dict[str, Any], json.loads(raw.decode(charset)))
430
+
431
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
    """Like read(), but parses the payload as form urlencoded data."""
    raw = await self.read(decode=True)
    if not raw:
        return []

    charset = encoding if encoding is not None else self.get_charset(default="utf-8")
    try:
        decoded = raw.rstrip().decode(charset)
    except UnicodeDecodeError:
        raise ValueError("data cannot be decoded with %s encoding" % charset)

    return parse_qsl(decoded, keep_blank_values=True, encoding=charset)
450
+
451
def at_eof(self) -> bool:
    """Whether the terminating boundary of this part has been reached."""
    return self._at_eof
454
+
455
def decode(self, data: bytes) -> bytes:
    """Decodes data.

    Decoding is done according the specified Content-Encoding
    or Content-Transfer-Encoding headers value.
    """
    if CONTENT_TRANSFER_ENCODING in self.headers:
        data = self._decode_content_transfer(data)
    # Per RFC 7578 section 4.8, Content-Encoding is ignored for form-data.
    if CONTENT_ENCODING in self.headers and not self._is_form_data:
        return self._decode_content(data)
    return data
467
+
468
def _decode_content(self, data: bytes) -> bytes:
    """Decompress *data* according to the Content-Encoding header."""
    encoding = self.headers.get(CONTENT_ENCODING, "").lower()
    if encoding == "identity":
        return data
    if encoding not in {"deflate", "gzip"}:
        raise RuntimeError(f"unknown content encoding: {encoding}")
    decompressor = ZLibDecompressor(
        encoding=encoding,
        suppress_deflate_header=True,
    )
    return decompressor.decompress_sync(data)
479
+
480
def _decode_content_transfer(self, data: bytes) -> bytes:
    """Decode *data* according to the Content-Transfer-Encoding header."""
    encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

    if encoding == "base64":
        return base64.b64decode(data)
    if encoding == "quoted-printable":
        return binascii.a2b_qp(data)
    if encoding in ("binary", "8bit", "7bit"):
        return data
    raise RuntimeError(f"unknown content transfer encoding: {encoding}")
493
+
494
def get_charset(self, default: str) -> str:
    """Charset from Content-Type, else the part's default, else *default*."""
    mimetype = parse_mimetype(self.headers.get(CONTENT_TYPE, ""))
    fallback = self._default_charset or default
    return mimetype.parameters.get("charset", fallback)
499
+
500
@reify
def name(self) -> Optional[str]:
    """Name from the Content-Disposition header, or None if absent/malformed."""
    _, disposition_params = parse_content_disposition(
        self.headers.get(CONTENT_DISPOSITION)
    )
    return content_disposition_filename(disposition_params, "name")
508
+
509
@reify
def filename(self) -> Optional[str]:
    """Filename from the Content-Disposition header, or None if absent/malformed."""
    _, disposition_params = parse_content_disposition(
        self.headers.get(CONTENT_DISPOSITION)
    )
    return content_disposition_filename(disposition_params, "filename")
517
+
518
+
519
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a BodyPartReader back out as a body."""

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        # Mirror the part's name/filename into our Content-Disposition.
        disposition: Dict[str, str] = {}
        if value.name is not None:
            disposition["name"] = value.name
        if value.filename is not None:
            disposition["filename"] = value.filename
        if disposition:
            self.set_content_disposition("attachment", True, **disposition)

    async def write(self, writer: Any) -> None:
        """Copy the underlying part to *writer* in 64 KiB decoded chunks."""
        field = self._value
        while True:
            chunk = await field.read_chunk(size=2**16)
            if not chunk:
                break
            await writer.write(field.decode(chunk))
539
+
540
+
541
class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when multipart readers constructs from response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        # Content-Type must be multipart/* and carry a boundary parameter.
        self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
        assert self._mimetype.type == "multipart", "multipart/* content type expected"
        if "boundary" not in self._mimetype.parameters:
            raise ValueError(
                "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
            )

        self.headers = headers
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        # Charset taken from a leading "_charset_" field (RFC 7578 sec. 4.6).
        self._default_charset: Optional[str] = None
        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
        self._at_eof = False
        self._at_bof = True
        # Lines read ahead of their consumer (e.g. a nested reader's epilogue).
        self._unread: List[bytes] = []

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs reader instance from HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, false otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None

        part = await self.fetch_next_part()
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
        # A first form-data field named "_charset_" sets the default charset
        # for all subsequent parts; consume it and fetch the real first part.
        if (
            self._last_part is None
            and self._mimetype.subtype == "form-data"
            and isinstance(part, BodyPartReader)
        ):
            _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
            if params.get("name") == "_charset_":
                # Longest encoding in https://encoding.spec.whatwg.org/encodings.json
                # is 19 characters, so 32 should be more than enough for any valid encoding.
                charset = await part.read_chunk(32)
                if len(charset) > 31:
                    raise RuntimeError("Invalid default charset")
                self._default_charset = charset.strip().decode()
                part = await self.fetch_next_part()
        self._last_part = part
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void till the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            # Nested multipart: recurse with a reader of the same class.
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(
                self._boundary,
                headers,
                self._content,
                subtype=self._mimetype.subtype,
                default_charset=self._default_charset,
            )

    def _get_boundary(self) -> str:
        # RFC 2046 section 5.1.1 limits boundaries to 70 characters.
        boundary = self._mimetype.parameters["boundary"]
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        # Serve pushed-back lines first (LIFO to preserve original order).
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        # Skip any preamble before the first boundary line.
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        # Collect header lines until the blank line terminating the block.
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        # NOTE(review): raw_headers is intentionally discarded here.
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
            # Adopt any lines the part pushed back so we see the boundary.
            self._unread.extend(self._last_part._unread)
            self._last_part = None
744
+
745
+
746
+ _Part = Tuple[Payload, str, str]
747
+
748
+
749
class MultipartWriter(Payload):
    """Multipart body writer."""

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []
        # form-data parts have extra header constraints (RFC 7578).
        self._is_form_data = subtype == "form-data"

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts (len() would say otherwise).
        return True

    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter      = token "=" ( token / quoted-string )
        # token          = 1*tchar
        # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text       = %x80-FF
        # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #                  / DIGIT / ALPHA
        #                  ; any VCHAR, except delimiters
        # VCHAR           = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        """The boundary as a plain (unquoted) ASCII string."""
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
        """Append *obj* as a part, adapting it to a Payload if needed."""
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        encoding: Optional[str] = None
        te_encoding: Optional[str] = None
        if self._is_form_data:
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
            assert CONTENT_DISPOSITION in payload.headers
            assert "name=" in payload.headers[CONTENT_DISPOSITION]
            assert (
                not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
                & payload.headers.keys()
            )
        else:
            # compression
            encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
            if encoding and encoding not in ("deflate", "gzip", "identity"):
                raise RuntimeError(f"unknown content encoding: {encoding}")
            if encoding == "identity":
                encoding = None

            # te encoding
            te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
            if te_encoding not in ("", "base64", "quoted-printable", "binary"):
                raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
            if te_encoding == "binary":
                te_encoding = None

            # size
            # Content-Length can only be set when no transform will change it.
            size = payload.size
            if size is not None and not (encoding or te_encoding):
                payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[Mapping[str, str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[Mapping[str, str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            # Encoded parts have unpredictable output size.
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through an encoding adapter.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        # close_boundary=False lets callers stream more parts later.
        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")
951
+
952
+
953
class MultipartPayloadWriter:
    """Writer adapter applying compression and/or transfer encoding.

    Wraps a stream writer; bytes passed to write() are optionally
    compressed (deflate/gzip) and then transfer-encoded (base64 or
    quoted-printable) before reaching the underlying writer.
    """

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None
        self._compress: Optional[ZLibCompressor] = None
        # base64 needs input in 3-byte groups; leftovers are buffered here.
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        # Silently ignores unknown encodings; callers validate beforehand.
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        self._compress = ZLibCompressor(
            encoding=encoding,
            suppress_deflate_header=True,
            strategy=strategy,
        )

    async def write_eof(self) -> None:
        """Flush the compressor and any buffered base64 remainder."""
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                # Clear _compress first so the recursive write() below
                # does not try to compress the flushed tail again.
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                # b64encode pads the (<3 byte) remainder itself.
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        """Transform *chunk* per the enabled encodings and forward it."""
        if self._compress is not None:
            if chunk:
                chunk = await self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Emit only whole 3-byte groups; keep the rest buffered so
                # no padding appears mid-stream.
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
env-llmeval/lib/python3.10/site-packages/aiohttp/payload.py ADDED
@@ -0,0 +1,463 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import enum
3
+ import io
4
+ import json
5
+ import mimetypes
6
+ import os
7
+ import warnings
8
+ from abc import ABC, abstractmethod
9
+ from itertools import chain
10
+ from typing import (
11
+ IO,
12
+ TYPE_CHECKING,
13
+ Any,
14
+ ByteString,
15
+ Dict,
16
+ Final,
17
+ Iterable,
18
+ Optional,
19
+ TextIO,
20
+ Tuple,
21
+ Type,
22
+ Union,
23
+ )
24
+
25
+ from multidict import CIMultiDict
26
+
27
+ from . import hdrs
28
+ from .abc import AbstractStreamWriter
29
+ from .helpers import (
30
+ _SENTINEL,
31
+ content_disposition_header,
32
+ guess_filename,
33
+ parse_mimetype,
34
+ sentinel,
35
+ )
36
+ from .streams import StreamReader
37
+ from .typedefs import JSONEncoder, _CIMultiDict
38
+
39
+ __all__ = (
40
+ "PAYLOAD_REGISTRY",
41
+ "get_payload",
42
+ "payload_type",
43
+ "Payload",
44
+ "BytesPayload",
45
+ "StringPayload",
46
+ "IOBasePayload",
47
+ "BytesIOPayload",
48
+ "BufferedReaderPayload",
49
+ "TextIOPayload",
50
+ "StringIOPayload",
51
+ "JsonPayload",
52
+ "AsyncIterablePayload",
53
+ )
54
+
55
+ TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
56
+
57
+ if TYPE_CHECKING:
58
+ from typing import List
59
+
60
+
61
class LookupError(Exception):
    # NOTE(review): intentionally shadows the builtin LookupError; the name
    # is part of the public payload API, so it cannot be renamed here.
    pass
63
+
64
+
65
class Order(str, enum.Enum):
    # Lookup order for registered payload factories: "try_first" entries are
    # consulted before "normal", which precede "try_last".
    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
69
+
70
+
71
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Adapt *data* to a Payload via the module-level registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
73
+
74
+
75
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for *type* in the module-level registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
79
+
80
+
81
class payload_type:
    """Class decorator registering the decorated Payload factory for *type*."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        register_payload(factory, self.type, order=self.order)
        # Return the class unchanged so decoration is transparent.
        return factory
89
+
90
+
91
+ PayloadType = Type["Payload"]
92
+ _PayloadRegistryItem = Tuple[PayloadType, Any]
93
+
94
+
95
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Return *data* as-is if it already is a Payload, else adapt it."""
        if isinstance(data, Payload):
            return data
        for factory, accepted_type in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, accepted_type):
                return factory(data, *args, **kwargs)

        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        """File *factory* under the bucket selected by *order*."""
        buckets = {
            Order.try_first: self._first,
            Order.normal: self._normal,
            Order.try_last: self._last,
        }
        bucket = buckets.get(order)
        if bucket is None:
            raise ValueError(f"Unsupported order {order!r}")
        bucket.append((factory, type))
132
+
133
+
134
class Payload(ABC):
    """Abstract base for all request/response body payloads."""

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        # Content-Type resolution: explicit argument wins, then a guess from
        # the filename, then the class default.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        # User-supplied headers may override the computed Content-Type.
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Headers serialized as an HTTP header block, terminated by CRLF.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
218
+
219
class BytesPayload(Payload):
    """Payload for in-memory bytes-like values (bytes, bytearray, memoryview)."""

    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        # len() of a memoryview counts elements, not bytes; use nbytes.
        if isinstance(value, memoryview):
            self._size = value.nbytes
        else:
            self._size = len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            # NOTE(review): kwargs is deliberately reused as the warn()
            # keyword dict here; the constructor kwargs are no longer needed.
            kwargs = {"source": self}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **kwargs,
            )

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write the whole buffered value to *writer* in a single call."""
        await writer.write(self._value)
246
+
247
+
248
class StringPayload(BytesPayload):
    """Payload for str values, encoded per an explicit or Content-Type charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # Resolve the effective charset: explicit encoding wins; otherwise
        # take it from the Content-Type parameters, defaulting to utf-8.
        if encoding is not None:
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is None:
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"
        else:
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
277
+
278
+
279
class StringIOPayload(StringPayload):
    """Payload that eagerly drains an in-memory text stream into a string."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        text = value.read()
        super().__init__(text, *args, **kwargs)
282
+
283
+
284
class IOBasePayload(Payload):
    """Payload streaming from a binary file-like object."""

    _value: IO[Any]

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        # Only set Content-Disposition when we have a filename and the
        # caller has not already provided the header.
        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the file to *writer* in 64 KiB chunks.

        Reads run in the default executor so blocking file I/O does not
        stall the event loop; the file is always closed afterwards.
        """
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
308
+
309
+
310
+ class TextIOPayload(IOBasePayload):
311
+ _value: TextIO
312
+
313
+ def __init__(
314
+ self,
315
+ value: TextIO,
316
+ *args: Any,
317
+ encoding: Optional[str] = None,
318
+ content_type: Optional[str] = None,
319
+ **kwargs: Any,
320
+ ) -> None:
321
+
322
+ if encoding is None:
323
+ if content_type is None:
324
+ encoding = "utf-8"
325
+ content_type = "text/plain; charset=utf-8"
326
+ else:
327
+ mimetype = parse_mimetype(content_type)
328
+ encoding = mimetype.parameters.get("charset", "utf-8")
329
+ else:
330
+ if content_type is None:
331
+ content_type = "text/plain; charset=%s" % encoding
332
+
333
+ super().__init__(
334
+ value,
335
+ content_type=content_type,
336
+ encoding=encoding,
337
+ *args,
338
+ **kwargs,
339
+ )
340
+
341
+ @property
342
+ def size(self) -> Optional[int]:
343
+ try:
344
+ return os.fstat(self._value.fileno()).st_size - self._value.tell()
345
+ except OSError:
346
+ return None
347
+
348
+ async def write(self, writer: AbstractStreamWriter) -> None:
349
+ loop = asyncio.get_event_loop()
350
+ try:
351
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
352
+ while chunk:
353
+ data = (
354
+ chunk.encode(encoding=self._encoding)
355
+ if self._encoding
356
+ else chunk.encode()
357
+ )
358
+ await writer.write(data)
359
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
360
+ finally:
361
+ await loop.run_in_executor(None, self._value.close)
362
+
363
+
364
+ class BytesIOPayload(IOBasePayload):
365
+ @property
366
+ def size(self) -> int:
367
+ position = self._value.tell()
368
+ end = self._value.seek(0, os.SEEK_END)
369
+ self._value.seek(position)
370
+ return end - position
371
+
372
+
373
+ class BufferedReaderPayload(IOBasePayload):
374
+ @property
375
+ def size(self) -> Optional[int]:
376
+ try:
377
+ return os.fstat(self._value.fileno()).st_size - self._value.tell()
378
+ except OSError:
379
+ # data.fileno() is not supported, e.g.
380
+ # io.BufferedReader(io.BytesIO(b'data'))
381
+ return None
382
+
383
+
384
+ class JsonPayload(BytesPayload):
385
+ def __init__(
386
+ self,
387
+ value: Any,
388
+ encoding: str = "utf-8",
389
+ content_type: str = "application/json",
390
+ dumps: JSONEncoder = json.dumps,
391
+ *args: Any,
392
+ **kwargs: Any,
393
+ ) -> None:
394
+
395
+ super().__init__(
396
+ dumps(value).encode(encoding),
397
+ content_type=content_type,
398
+ encoding=encoding,
399
+ *args,
400
+ **kwargs,
401
+ )
402
+
403
+
404
+ if TYPE_CHECKING:
405
+ from typing import AsyncIterable, AsyncIterator
406
+
407
+ _AsyncIterator = AsyncIterator[bytes]
408
+ _AsyncIterable = AsyncIterable[bytes]
409
+ else:
410
+ from collections.abc import AsyncIterable, AsyncIterator
411
+
412
+ _AsyncIterator = AsyncIterator
413
+ _AsyncIterable = AsyncIterable
414
+
415
+
416
+ class AsyncIterablePayload(Payload):
417
+
418
+ _iter: Optional[_AsyncIterator] = None
419
+
420
+ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
421
+ if not isinstance(value, AsyncIterable):
422
+ raise TypeError(
423
+ "value argument must support "
424
+ "collections.abc.AsyncIterable interface, "
425
+ "got {!r}".format(type(value))
426
+ )
427
+
428
+ if "content_type" not in kwargs:
429
+ kwargs["content_type"] = "application/octet-stream"
430
+
431
+ super().__init__(value, *args, **kwargs)
432
+
433
+ self._iter = value.__aiter__()
434
+
435
+ async def write(self, writer: AbstractStreamWriter) -> None:
436
+ if self._iter:
437
+ try:
438
+ # iter is not None check prevents rare cases
439
+ # when the case iterable is used twice
440
+ while True:
441
+ chunk = await self._iter.__anext__()
442
+ await writer.write(chunk)
443
+ except StopAsyncIteration:
444
+ self._iter = None
445
+
446
+
447
+ class StreamReaderPayload(AsyncIterablePayload):
448
+ def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
449
+ super().__init__(value.iter_any(), *args, **kwargs)
450
+
451
+
452
+ PAYLOAD_REGISTRY = PayloadRegistry()
453
+ PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
454
+ PAYLOAD_REGISTRY.register(StringPayload, str)
455
+ PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
456
+ PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
457
+ PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
458
+ PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
459
+ PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
460
+ PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
461
+ # try_last for giving a chance to more specialized async interables like
462
+ # multidict.BodyPartReaderPayload override the default
463
+ PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
env-llmeval/lib/python3.10/site-packages/aiohttp/payload_streamer.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Payload implementation for coroutines as data provider.
3
+
4
+ As a simple case, you can upload data from file::
5
+
6
+ @aiohttp.streamer
7
+ async def file_sender(writer, file_name=None):
8
+ with open(file_name, 'rb') as f:
9
+ chunk = f.read(2**16)
10
+ while chunk:
11
+ await writer.write(chunk)
12
+
13
+ chunk = f.read(2**16)
14
+
15
+ Then you can use `file_sender` like this:
16
+
17
+ async with session.post('http://httpbin.org/post',
18
+ data=file_sender(file_name='huge_file')) as resp:
19
+ print(await resp.text())
20
+
21
+ ..note:: Coroutine must accept `writer` as first argument
22
+
23
+ """
24
+
25
+ import types
26
+ import warnings
27
+ from typing import Any, Awaitable, Callable, Dict, Tuple
28
+
29
+ from .abc import AbstractStreamWriter
30
+ from .payload import Payload, payload_type
31
+
32
+ __all__ = ("streamer",)
33
+
34
+
35
+ class _stream_wrapper:
36
+ def __init__(
37
+ self,
38
+ coro: Callable[..., Awaitable[None]],
39
+ args: Tuple[Any, ...],
40
+ kwargs: Dict[str, Any],
41
+ ) -> None:
42
+ self.coro = types.coroutine(coro)
43
+ self.args = args
44
+ self.kwargs = kwargs
45
+
46
+ async def __call__(self, writer: AbstractStreamWriter) -> None:
47
+ await self.coro(writer, *self.args, **self.kwargs)
48
+
49
+
50
+ class streamer:
51
+ def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
52
+ warnings.warn(
53
+ "@streamer is deprecated, use async generators instead",
54
+ DeprecationWarning,
55
+ stacklevel=2,
56
+ )
57
+ self.coro = coro
58
+
59
+ def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
60
+ return _stream_wrapper(self.coro, args, kwargs)
61
+
62
+
63
+ @payload_type(_stream_wrapper)
64
+ class StreamWrapperPayload(Payload):
65
+ async def write(self, writer: AbstractStreamWriter) -> None:
66
+ await self._value(writer)
67
+
68
+
69
+ @payload_type(streamer)
70
+ class StreamPayload(StreamWrapperPayload):
71
+ def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
72
+ super().__init__(value(), *args, **kwargs)
73
+
74
+ async def write(self, writer: AbstractStreamWriter) -> None:
75
+ await self._value(writer)
env-llmeval/lib/python3.10/site-packages/aiohttp/py.typed ADDED
@@ -0,0 +1 @@
 
 
1
+ Marker
env-llmeval/lib/python3.10/site-packages/aiohttp/pytest_plugin.py ADDED
@@ -0,0 +1,381 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import contextlib
3
+ import warnings
4
+ from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union
5
+
6
+ import pytest
7
+
8
+ from aiohttp.helpers import isasyncgenfunction
9
+ from aiohttp.web import Application
10
+
11
+ from .test_utils import (
12
+ BaseTestServer,
13
+ RawTestServer,
14
+ TestClient,
15
+ TestServer,
16
+ loop_context,
17
+ setup_test_loop,
18
+ teardown_test_loop,
19
+ unused_port as _unused_port,
20
+ )
21
+
22
+ try:
23
+ import uvloop
24
+ except ImportError: # pragma: no cover
25
+ uvloop = None # type: ignore[assignment]
26
+
27
+ AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
28
+ AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]]
29
+ AiohttpServer = Callable[[Application], Awaitable[TestServer]]
30
+
31
+
32
+ def pytest_addoption(parser): # type: ignore[no-untyped-def]
33
+ parser.addoption(
34
+ "--aiohttp-fast",
35
+ action="store_true",
36
+ default=False,
37
+ help="run tests faster by disabling extra checks",
38
+ )
39
+ parser.addoption(
40
+ "--aiohttp-loop",
41
+ action="store",
42
+ default="pyloop",
43
+ help="run tests with specific loop: pyloop, uvloop or all",
44
+ )
45
+ parser.addoption(
46
+ "--aiohttp-enable-loop-debug",
47
+ action="store_true",
48
+ default=False,
49
+ help="enable event loop debug mode",
50
+ )
51
+
52
+
53
+ def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
54
+ """Set up pytest fixture.
55
+
56
+ Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
57
+ """
58
+ func = fixturedef.func
59
+
60
+ if isasyncgenfunction(func):
61
+ # async generator fixture
62
+ is_async_gen = True
63
+ elif asyncio.iscoroutinefunction(func):
64
+ # regular async fixture
65
+ is_async_gen = False
66
+ else:
67
+ # not an async fixture, nothing to do
68
+ return
69
+
70
+ strip_request = False
71
+ if "request" not in fixturedef.argnames:
72
+ fixturedef.argnames += ("request",)
73
+ strip_request = True
74
+
75
+ def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
76
+ request = kwargs["request"]
77
+ if strip_request:
78
+ del kwargs["request"]
79
+
80
+ # if neither the fixture nor the test use the 'loop' fixture,
81
+ # 'getfixturevalue' will fail because the test is not parameterized
82
+ # (this can be removed someday if 'loop' is no longer parameterized)
83
+ if "loop" not in request.fixturenames:
84
+ raise Exception(
85
+ "Asynchronous fixtures must depend on the 'loop' fixture or "
86
+ "be used in tests depending from it."
87
+ )
88
+
89
+ _loop = request.getfixturevalue("loop")
90
+
91
+ if is_async_gen:
92
+ # for async generators, we need to advance the generator once,
93
+ # then advance it again in a finalizer
94
+ gen = func(*args, **kwargs)
95
+
96
+ def finalizer(): # type: ignore[no-untyped-def]
97
+ try:
98
+ return _loop.run_until_complete(gen.__anext__())
99
+ except StopAsyncIteration:
100
+ pass
101
+
102
+ request.addfinalizer(finalizer)
103
+ return _loop.run_until_complete(gen.__anext__())
104
+ else:
105
+ return _loop.run_until_complete(func(*args, **kwargs))
106
+
107
+ fixturedef.func = wrapper
108
+
109
+
110
+ @pytest.fixture
111
+ def fast(request): # type: ignore[no-untyped-def]
112
+ """--fast config option"""
113
+ return request.config.getoption("--aiohttp-fast")
114
+
115
+
116
+ @pytest.fixture
117
+ def loop_debug(request): # type: ignore[no-untyped-def]
118
+ """--enable-loop-debug config option"""
119
+ return request.config.getoption("--aiohttp-enable-loop-debug")
120
+
121
+
122
+ @contextlib.contextmanager
123
+ def _runtime_warning_context(): # type: ignore[no-untyped-def]
124
+ """Context manager which checks for RuntimeWarnings.
125
+
126
+ This exists specifically to
127
+ avoid "coroutine 'X' was never awaited" warnings being missed.
128
+
129
+ If RuntimeWarnings occur in the context a RuntimeError is raised.
130
+ """
131
+ with warnings.catch_warnings(record=True) as _warnings:
132
+ yield
133
+ rw = [
134
+ "{w.filename}:{w.lineno}:{w.message}".format(w=w)
135
+ for w in _warnings
136
+ if w.category == RuntimeWarning
137
+ ]
138
+ if rw:
139
+ raise RuntimeError(
140
+ "{} Runtime Warning{},\n{}".format(
141
+ len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
142
+ )
143
+ )
144
+
145
+
146
+ @contextlib.contextmanager
147
+ def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
148
+ """Passthrough loop context.
149
+
150
+ Sets up and tears down a loop unless one is passed in via the loop
151
+ argument when it's passed straight through.
152
+ """
153
+ if loop:
154
+ # loop already exists, pass it straight through
155
+ yield loop
156
+ else:
157
+ # this shadows loop_context's standard behavior
158
+ loop = setup_test_loop()
159
+ yield loop
160
+ teardown_test_loop(loop, fast=fast)
161
+
162
+
163
+ def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
164
+ """Fix pytest collecting for coroutines."""
165
+ if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
166
+ return list(collector._genfunctions(name, obj))
167
+
168
+
169
+ def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
170
+ """Run coroutines in an event loop instead of a normal function call."""
171
+ fast = pyfuncitem.config.getoption("--aiohttp-fast")
172
+ if asyncio.iscoroutinefunction(pyfuncitem.function):
173
+ existing_loop = pyfuncitem.funcargs.get(
174
+ "proactor_loop"
175
+ ) or pyfuncitem.funcargs.get("loop", None)
176
+ with _runtime_warning_context():
177
+ with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
178
+ testargs = {
179
+ arg: pyfuncitem.funcargs[arg]
180
+ for arg in pyfuncitem._fixtureinfo.argnames
181
+ }
182
+ _loop.run_until_complete(pyfuncitem.obj(**testargs))
183
+
184
+ return True
185
+
186
+
187
+ def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
188
+ if "loop_factory" not in metafunc.fixturenames:
189
+ return
190
+
191
+ loops = metafunc.config.option.aiohttp_loop
192
+ avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
193
+ avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
194
+
195
+ if uvloop is not None: # pragma: no cover
196
+ avail_factories["uvloop"] = uvloop.EventLoopPolicy
197
+
198
+ if loops == "all":
199
+ loops = "pyloop,uvloop?"
200
+
201
+ factories = {} # type: ignore[var-annotated]
202
+ for name in loops.split(","):
203
+ required = not name.endswith("?")
204
+ name = name.strip(" ?")
205
+ if name not in avail_factories: # pragma: no cover
206
+ if required:
207
+ raise ValueError(
208
+ "Unknown loop '%s', available loops: %s"
209
+ % (name, list(factories.keys()))
210
+ )
211
+ else:
212
+ continue
213
+ factories[name] = avail_factories[name]
214
+ metafunc.parametrize(
215
+ "loop_factory", list(factories.values()), ids=list(factories.keys())
216
+ )
217
+
218
+
219
+ @pytest.fixture
220
+ def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
221
+ """Return an instance of the event loop."""
222
+ policy = loop_factory()
223
+ asyncio.set_event_loop_policy(policy)
224
+ with loop_context(fast=fast) as _loop:
225
+ if loop_debug:
226
+ _loop.set_debug(True) # pragma: no cover
227
+ asyncio.set_event_loop(_loop)
228
+ yield _loop
229
+
230
+
231
+ @pytest.fixture
232
+ def proactor_loop(): # type: ignore[no-untyped-def]
233
+ policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
234
+ asyncio.set_event_loop_policy(policy)
235
+
236
+ with loop_context(policy.new_event_loop) as _loop:
237
+ asyncio.set_event_loop(_loop)
238
+ yield _loop
239
+
240
+
241
+ @pytest.fixture
242
+ def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
243
+ warnings.warn(
244
+ "Deprecated, use aiohttp_unused_port fixture instead",
245
+ DeprecationWarning,
246
+ stacklevel=2,
247
+ )
248
+ return aiohttp_unused_port
249
+
250
+
251
+ @pytest.fixture
252
+ def aiohttp_unused_port() -> Callable[[], int]:
253
+ """Return a port that is unused on the current host."""
254
+ return _unused_port
255
+
256
+
257
+ @pytest.fixture
258
+ def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
259
+ """Factory to create a TestServer instance, given an app.
260
+
261
+ aiohttp_server(app, **kwargs)
262
+ """
263
+ servers = []
264
+
265
+ async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def]
266
+ server = TestServer(app, port=port)
267
+ await server.start_server(loop=loop, **kwargs)
268
+ servers.append(server)
269
+ return server
270
+
271
+ yield go
272
+
273
+ async def finalize() -> None:
274
+ while servers:
275
+ await servers.pop().close()
276
+
277
+ loop.run_until_complete(finalize())
278
+
279
+
280
+ @pytest.fixture
281
+ def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
282
+ warnings.warn(
283
+ "Deprecated, use aiohttp_server fixture instead",
284
+ DeprecationWarning,
285
+ stacklevel=2,
286
+ )
287
+ return aiohttp_server
288
+
289
+
290
+ @pytest.fixture
291
+ def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
292
+ """Factory to create a RawTestServer instance, given a web handler.
293
+
294
+ aiohttp_raw_server(handler, **kwargs)
295
+ """
296
+ servers = []
297
+
298
+ async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def]
299
+ server = RawTestServer(handler, port=port)
300
+ await server.start_server(loop=loop, **kwargs)
301
+ servers.append(server)
302
+ return server
303
+
304
+ yield go
305
+
306
+ async def finalize() -> None:
307
+ while servers:
308
+ await servers.pop().close()
309
+
310
+ loop.run_until_complete(finalize())
311
+
312
+
313
+ @pytest.fixture
314
+ def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
315
+ aiohttp_raw_server,
316
+ ):
317
+ warnings.warn(
318
+ "Deprecated, use aiohttp_raw_server fixture instead",
319
+ DeprecationWarning,
320
+ stacklevel=2,
321
+ )
322
+ return aiohttp_raw_server
323
+
324
+
325
+ @pytest.fixture
326
+ def aiohttp_client(
327
+ loop: asyncio.AbstractEventLoop,
328
+ ) -> Iterator[AiohttpClient]:
329
+ """Factory to create a TestClient instance.
330
+
331
+ aiohttp_client(app, **kwargs)
332
+ aiohttp_client(server, **kwargs)
333
+ aiohttp_client(raw_server, **kwargs)
334
+ """
335
+ clients = []
336
+
337
+ async def go(
338
+ __param: Union[Application, BaseTestServer],
339
+ *args: Any,
340
+ server_kwargs: Optional[Dict[str, Any]] = None,
341
+ **kwargs: Any
342
+ ) -> TestClient:
343
+
344
+ if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
345
+ __param, (Application, BaseTestServer)
346
+ ):
347
+ __param = __param(loop, *args, **kwargs)
348
+ kwargs = {}
349
+ else:
350
+ assert not args, "args should be empty"
351
+
352
+ if isinstance(__param, Application):
353
+ server_kwargs = server_kwargs or {}
354
+ server = TestServer(__param, loop=loop, **server_kwargs)
355
+ client = TestClient(server, loop=loop, **kwargs)
356
+ elif isinstance(__param, BaseTestServer):
357
+ client = TestClient(__param, loop=loop, **kwargs)
358
+ else:
359
+ raise ValueError("Unknown argument type: %r" % type(__param))
360
+
361
+ await client.start_server()
362
+ clients.append(client)
363
+ return client
364
+
365
+ yield go
366
+
367
+ async def finalize() -> None:
368
+ while clients:
369
+ await clients.pop().close()
370
+
371
+ loop.run_until_complete(finalize())
372
+
373
+
374
+ @pytest.fixture
375
+ def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
376
+ warnings.warn(
377
+ "Deprecated, use aiohttp_client fixture instead",
378
+ DeprecationWarning,
379
+ stacklevel=2,
380
+ )
381
+ return aiohttp_client
env-llmeval/lib/python3.10/site-packages/aiohttp/resolver.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import socket
3
+ from typing import Any, Dict, List, Optional, Type, Union
4
+
5
+ from .abc import AbstractResolver
6
+ from .helpers import get_running_loop
7
+
8
+ __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
9
+
10
+ try:
11
+ import aiodns
12
+
13
+ # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
14
+ except ImportError: # pragma: no cover
15
+ aiodns = None
16
+
17
+ aiodns_default = False
18
+
19
+
20
+ class ThreadedResolver(AbstractResolver):
21
+ """Threaded resolver.
22
+
23
+ Uses an Executor for synchronous getaddrinfo() calls.
24
+ concurrent.futures.ThreadPoolExecutor is used by default.
25
+ """
26
+
27
+ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
28
+ self._loop = get_running_loop(loop)
29
+
30
+ async def resolve(
31
+ self, hostname: str, port: int = 0, family: int = socket.AF_INET
32
+ ) -> List[Dict[str, Any]]:
33
+ infos = await self._loop.getaddrinfo(
34
+ hostname,
35
+ port,
36
+ type=socket.SOCK_STREAM,
37
+ family=family,
38
+ flags=socket.AI_ADDRCONFIG,
39
+ )
40
+
41
+ hosts = []
42
+ for family, _, proto, _, address in infos:
43
+ if family == socket.AF_INET6:
44
+ if len(address) < 3:
45
+ # IPv6 is not supported by Python build,
46
+ # or IPv6 is not enabled in the host
47
+ continue
48
+ if address[3]:
49
+ # This is essential for link-local IPv6 addresses.
50
+ # LL IPv6 is a VERY rare case. Strictly speaking, we should use
51
+ # getnameinfo() unconditionally, but performance makes sense.
52
+ host, _port = socket.getnameinfo(
53
+ address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
54
+ )
55
+ port = int(_port)
56
+ else:
57
+ host, port = address[:2]
58
+ else: # IPv4
59
+ assert family == socket.AF_INET
60
+ host, port = address # type: ignore[misc]
61
+ hosts.append(
62
+ {
63
+ "hostname": hostname,
64
+ "host": host,
65
+ "port": port,
66
+ "family": family,
67
+ "proto": proto,
68
+ "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
69
+ }
70
+ )
71
+
72
+ return hosts
73
+
74
+ async def close(self) -> None:
75
+ pass
76
+
77
+
78
+ class AsyncResolver(AbstractResolver):
79
+ """Use the `aiodns` package to make asynchronous DNS lookups"""
80
+
81
+ def __init__(
82
+ self,
83
+ loop: Optional[asyncio.AbstractEventLoop] = None,
84
+ *args: Any,
85
+ **kwargs: Any
86
+ ) -> None:
87
+ if aiodns is None:
88
+ raise RuntimeError("Resolver requires aiodns library")
89
+
90
+ self._loop = get_running_loop(loop)
91
+ self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
92
+
93
+ if not hasattr(self._resolver, "gethostbyname"):
94
+ # aiodns 1.1 is not available, fallback to DNSResolver.query
95
+ self.resolve = self._resolve_with_query # type: ignore
96
+
97
+ async def resolve(
98
+ self, host: str, port: int = 0, family: int = socket.AF_INET
99
+ ) -> List[Dict[str, Any]]:
100
+ try:
101
+ resp = await self._resolver.gethostbyname(host, family)
102
+ except aiodns.error.DNSError as exc:
103
+ msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
104
+ raise OSError(msg) from exc
105
+ hosts = []
106
+ for address in resp.addresses:
107
+ hosts.append(
108
+ {
109
+ "hostname": host,
110
+ "host": address,
111
+ "port": port,
112
+ "family": family,
113
+ "proto": 0,
114
+ "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
115
+ }
116
+ )
117
+
118
+ if not hosts:
119
+ raise OSError("DNS lookup failed")
120
+
121
+ return hosts
122
+
123
+ async def _resolve_with_query(
124
+ self, host: str, port: int = 0, family: int = socket.AF_INET
125
+ ) -> List[Dict[str, Any]]:
126
+ if family == socket.AF_INET6:
127
+ qtype = "AAAA"
128
+ else:
129
+ qtype = "A"
130
+
131
+ try:
132
+ resp = await self._resolver.query(host, qtype)
133
+ except aiodns.error.DNSError as exc:
134
+ msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
135
+ raise OSError(msg) from exc
136
+
137
+ hosts = []
138
+ for rr in resp:
139
+ hosts.append(
140
+ {
141
+ "hostname": host,
142
+ "host": rr.host,
143
+ "port": port,
144
+ "family": family,
145
+ "proto": 0,
146
+ "flags": socket.AI_NUMERICHOST,
147
+ }
148
+ )
149
+
150
+ if not hosts:
151
+ raise OSError("DNS lookup failed")
152
+
153
+ return hosts
154
+
155
+ async def close(self) -> None:
156
+ self._resolver.cancel()
157
+
158
+
159
+ _DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
160
+ DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
env-llmeval/lib/python3.10/site-packages/aiohttp/streams.py ADDED
@@ -0,0 +1,684 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import collections
3
+ import warnings
4
+ from typing import (
5
+ Awaitable,
6
+ Callable,
7
+ Deque,
8
+ Final,
9
+ Generic,
10
+ List,
11
+ Optional,
12
+ Tuple,
13
+ TypeVar,
14
+ )
15
+
16
+ from .base_protocol import BaseProtocol
17
+ from .helpers import (
18
+ _EXC_SENTINEL,
19
+ BaseTimerContext,
20
+ TimerNoop,
21
+ set_exception,
22
+ set_result,
23
+ )
24
+ from .log import internal_logger
25
+
26
+ __all__ = (
27
+ "EMPTY_PAYLOAD",
28
+ "EofStream",
29
+ "StreamReader",
30
+ "DataQueue",
31
+ "FlowControlDataQueue",
32
+ )
33
+
34
+ _T = TypeVar("_T")
35
+
36
+
37
+ class EofStream(Exception):
38
+ """eof stream indication."""
39
+
40
+
41
+ class AsyncStreamIterator(Generic[_T]):
42
+ def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
43
+ self.read_func = read_func
44
+
45
+ def __aiter__(self) -> "AsyncStreamIterator[_T]":
46
+ return self
47
+
48
+ async def __anext__(self) -> _T:
49
+ try:
50
+ rv = await self.read_func()
51
+ except EofStream:
52
+ raise StopAsyncIteration
53
+ if rv == b"":
54
+ raise StopAsyncIteration
55
+ return rv
56
+
57
+
58
+ class ChunkTupleAsyncStreamIterator:
59
+ def __init__(self, stream: "StreamReader") -> None:
60
+ self._stream = stream
61
+
62
+ def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
63
+ return self
64
+
65
+ async def __anext__(self) -> Tuple[bytes, bool]:
66
+ rv = await self._stream.readchunk()
67
+ if rv == (b"", False):
68
+ raise StopAsyncIteration
69
+ return rv
70
+
71
+
72
+ class AsyncStreamReaderMixin:
73
+ def __aiter__(self) -> AsyncStreamIterator[bytes]:
74
+ return AsyncStreamIterator(self.readline) # type: ignore[attr-defined]
75
+
76
+ def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
77
+ """Returns an asynchronous iterator that yields chunks of size n."""
78
+ return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined]
79
+
80
+ def iter_any(self) -> AsyncStreamIterator[bytes]:
81
+ """Yield all available data as soon as it is received."""
82
+ return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]
83
+
84
+ def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
85
+ """Yield chunks of data as they are received by the server.
86
+
87
+ The yielded objects are tuples
88
+ of (bytes, bool) as returned by the StreamReader.readchunk method.
89
+ """
90
+ return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type]
91
+
92
+
93
+ class StreamReader(AsyncStreamReaderMixin):
94
+ """An enhancement of asyncio.StreamReader.
95
+
96
+ Supports asynchronous iteration by line, chunk or as available::
97
+
98
+ async for line in reader:
99
+ ...
100
+ async for chunk in reader.iter_chunked(1024):
101
+ ...
102
+ async for slice in reader.iter_any():
103
+ ...
104
+
105
+ """
106
+
107
+ total_bytes = 0
108
+
109
+ def __init__(
110
+ self,
111
+ protocol: BaseProtocol,
112
+ limit: int,
113
+ *,
114
+ timer: Optional[BaseTimerContext] = None,
115
+ loop: Optional[asyncio.AbstractEventLoop] = None,
116
+ ) -> None:
117
+ self._protocol = protocol
118
+ self._low_water = limit
119
+ self._high_water = limit * 2
120
+ if loop is None:
121
+ loop = asyncio.get_event_loop()
122
+ self._loop = loop
123
+ self._size = 0
124
+ self._cursor = 0
125
+ self._http_chunk_splits: Optional[List[int]] = None
126
+ self._buffer: Deque[bytes] = collections.deque()
127
+ self._buffer_offset = 0
128
+ self._eof = False
129
+ self._waiter: Optional[asyncio.Future[None]] = None
130
+ self._eof_waiter: Optional[asyncio.Future[None]] = None
131
+ self._exception: Optional[BaseException] = None
132
+ self._timer = TimerNoop() if timer is None else timer
133
+ self._eof_callbacks: List[Callable[[], None]] = []
134
+
135
+ def __repr__(self) -> str:
136
+ info = [self.__class__.__name__]
137
+ if self._size:
138
+ info.append("%d bytes" % self._size)
139
+ if self._eof:
140
+ info.append("eof")
141
+ if self._low_water != 2**16: # default limit
142
+ info.append("low=%d high=%d" % (self._low_water, self._high_water))
143
+ if self._waiter:
144
+ info.append("w=%r" % self._waiter)
145
+ if self._exception:
146
+ info.append("e=%r" % self._exception)
147
+ return "<%s>" % " ".join(info)
148
+
149
+ def get_read_buffer_limits(self) -> Tuple[int, int]:
150
+ return (self._low_water, self._high_water)
151
+
152
+ def exception(self) -> Optional[BaseException]:
153
+ return self._exception
154
+
155
+ def set_exception(
156
+ self,
157
+ exc: BaseException,
158
+ exc_cause: BaseException = _EXC_SENTINEL,
159
+ ) -> None:
160
+ self._exception = exc
161
+ self._eof_callbacks.clear()
162
+
163
+ waiter = self._waiter
164
+ if waiter is not None:
165
+ self._waiter = None
166
+ set_exception(waiter, exc, exc_cause)
167
+
168
+ waiter = self._eof_waiter
169
+ if waiter is not None:
170
+ self._eof_waiter = None
171
+ set_exception(waiter, exc, exc_cause)
172
+
173
+ def on_eof(self, callback: Callable[[], None]) -> None:
174
+ if self._eof:
175
+ try:
176
+ callback()
177
+ except Exception:
178
+ internal_logger.exception("Exception in eof callback")
179
+ else:
180
+ self._eof_callbacks.append(callback)
181
+
182
+ def feed_eof(self) -> None:
183
+ self._eof = True
184
+
185
+ waiter = self._waiter
186
+ if waiter is not None:
187
+ self._waiter = None
188
+ set_result(waiter, None)
189
+
190
+ waiter = self._eof_waiter
191
+ if waiter is not None:
192
+ self._eof_waiter = None
193
+ set_result(waiter, None)
194
+
195
+ for cb in self._eof_callbacks:
196
+ try:
197
+ cb()
198
+ except Exception:
199
+ internal_logger.exception("Exception in eof callback")
200
+
201
+ self._eof_callbacks.clear()
202
+
203
+ def is_eof(self) -> bool:
204
+ """Return True if 'feed_eof' was called."""
205
+ return self._eof
206
+
207
+ def at_eof(self) -> bool:
208
+ """Return True if the buffer is empty and 'feed_eof' was called."""
209
+ return self._eof and not self._buffer
210
+
211
+ async def wait_eof(self) -> None:
212
+ if self._eof:
213
+ return
214
+
215
+ assert self._eof_waiter is None
216
+ self._eof_waiter = self._loop.create_future()
217
+ try:
218
+ await self._eof_waiter
219
+ finally:
220
+ self._eof_waiter = None
221
+
222
+ def unread_data(self, data: bytes) -> None:
223
+ """rollback reading some data from stream, inserting it to buffer head."""
224
+ warnings.warn(
225
+ "unread_data() is deprecated "
226
+ "and will be removed in future releases (#3260)",
227
+ DeprecationWarning,
228
+ stacklevel=2,
229
+ )
230
+ if not data:
231
+ return
232
+
233
+ if self._buffer_offset:
234
+ self._buffer[0] = self._buffer[0][self._buffer_offset :]
235
+ self._buffer_offset = 0
236
+ self._size += len(data)
237
+ self._cursor -= len(data)
238
+ self._buffer.appendleft(data)
239
+ self._eof_counter = 0
240
+
241
+ # TODO: size is ignored, remove the param later
242
+ def feed_data(self, data: bytes, size: int = 0) -> None:
243
+ assert not self._eof, "feed_data after feed_eof"
244
+
245
+ if not data:
246
+ return
247
+
248
+ self._size += len(data)
249
+ self._buffer.append(data)
250
+ self.total_bytes += len(data)
251
+
252
+ waiter = self._waiter
253
+ if waiter is not None:
254
+ self._waiter = None
255
+ set_result(waiter, None)
256
+
257
+ if self._size > self._high_water and not self._protocol._reading_paused:
258
+ self._protocol.pause_reading()
259
+
260
+ def begin_http_chunk_receiving(self) -> None:
261
+ if self._http_chunk_splits is None:
262
+ if self.total_bytes:
263
+ raise RuntimeError(
264
+ "Called begin_http_chunk_receiving when" "some data was already fed"
265
+ )
266
+ self._http_chunk_splits = []
267
+
268
+ def end_http_chunk_receiving(self) -> None:
269
+ if self._http_chunk_splits is None:
270
+ raise RuntimeError(
271
+ "Called end_chunk_receiving without calling "
272
+ "begin_chunk_receiving first"
273
+ )
274
+
275
+ # self._http_chunk_splits contains logical byte offsets from start of
276
+ # the body transfer. Each offset is the offset of the end of a chunk.
277
+ # "Logical" means bytes, accessible for a user.
278
+ # If no chunks containing logical data were received, current position
279
+ # is definitely zero.
280
+ pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
281
+
282
+ if self.total_bytes == pos:
283
+ # We should not add empty chunks here. So we check for that.
284
+ # Note, when chunked + gzip is used, we can receive a chunk
285
+ # of compressed data, but that data may not be enough for gzip FSM
286
+ # to yield any uncompressed data. That's why current position may
287
+ # not change after receiving a chunk.
288
+ return
289
+
290
+ self._http_chunk_splits.append(self.total_bytes)
291
+
292
+ # wake up readchunk when end of http chunk received
293
+ waiter = self._waiter
294
+ if waiter is not None:
295
+ self._waiter = None
296
+ set_result(waiter, None)
297
+
298
+ async def _wait(self, func_name: str) -> None:
299
+ # StreamReader uses a future to link the protocol feed_data() method
300
+ # to a read coroutine. Running two read coroutines at the same time
301
+ # would have an unexpected behaviour. It would not be possible to know
302
+ # which coroutine would get the next data.
303
+ if self._waiter is not None:
304
+ raise RuntimeError(
305
+ "%s() called while another coroutine is "
306
+ "already waiting for incoming data" % func_name
307
+ )
308
+
309
+ waiter = self._waiter = self._loop.create_future()
310
+ try:
311
+ with self._timer:
312
+ await waiter
313
+ finally:
314
+ self._waiter = None
315
+
316
+ async def readline(self) -> bytes:
317
+ return await self.readuntil()
318
+
319
+ async def readuntil(self, separator: bytes = b"\n") -> bytes:
320
+ seplen = len(separator)
321
+ if seplen == 0:
322
+ raise ValueError("Separator should be at least one-byte string")
323
+
324
+ if self._exception is not None:
325
+ raise self._exception
326
+
327
+ chunk = b""
328
+ chunk_size = 0
329
+ not_enough = True
330
+
331
+ while not_enough:
332
+ while self._buffer and not_enough:
333
+ offset = self._buffer_offset
334
+ ichar = self._buffer[0].find(separator, offset) + 1
335
+ # Read from current offset to found separator or to the end.
336
+ data = self._read_nowait_chunk(
337
+ ichar - offset + seplen - 1 if ichar else -1
338
+ )
339
+ chunk += data
340
+ chunk_size += len(data)
341
+ if ichar:
342
+ not_enough = False
343
+
344
+ if chunk_size > self._high_water:
345
+ raise ValueError("Chunk too big")
346
+
347
+ if self._eof:
348
+ break
349
+
350
+ if not_enough:
351
+ await self._wait("readuntil")
352
+
353
+ return chunk
354
+
355
+ async def read(self, n: int = -1) -> bytes:
356
+ if self._exception is not None:
357
+ raise self._exception
358
+
359
+ # migration problem; with DataQueue you have to catch
360
+ # EofStream exception, so common way is to run payload.read() inside
361
+ # infinite loop. what can cause real infinite loop with StreamReader
362
+ # lets keep this code one major release.
363
+ if __debug__:
364
+ if self._eof and not self._buffer:
365
+ self._eof_counter = getattr(self, "_eof_counter", 0) + 1
366
+ if self._eof_counter > 5:
367
+ internal_logger.warning(
368
+ "Multiple access to StreamReader in eof state, "
369
+ "might be infinite loop.",
370
+ stack_info=True,
371
+ )
372
+
373
+ if not n:
374
+ return b""
375
+
376
+ if n < 0:
377
+ # This used to just loop creating a new waiter hoping to
378
+ # collect everything in self._buffer, but that would
379
+ # deadlock if the subprocess sends more than self.limit
380
+ # bytes. So just call self.readany() until EOF.
381
+ blocks = []
382
+ while True:
383
+ block = await self.readany()
384
+ if not block:
385
+ break
386
+ blocks.append(block)
387
+ return b"".join(blocks)
388
+
389
+ # TODO: should be `if` instead of `while`
390
+ # because waiter maybe triggered on chunk end,
391
+ # without feeding any data
392
+ while not self._buffer and not self._eof:
393
+ await self._wait("read")
394
+
395
+ return self._read_nowait(n)
396
+
397
+ async def readany(self) -> bytes:
398
+ if self._exception is not None:
399
+ raise self._exception
400
+
401
+ # TODO: should be `if` instead of `while`
402
+ # because waiter maybe triggered on chunk end,
403
+ # without feeding any data
404
+ while not self._buffer and not self._eof:
405
+ await self._wait("readany")
406
+
407
+ return self._read_nowait(-1)
408
+
409
+ async def readchunk(self) -> Tuple[bytes, bool]:
410
+ """Returns a tuple of (data, end_of_http_chunk).
411
+
412
+ When chunked transfer
413
+ encoding is used, end_of_http_chunk is a boolean indicating if the end
414
+ of the data corresponds to the end of an HTTP chunk, otherwise it is
415
+ always False.
416
+ """
417
+ while True:
418
+ if self._exception is not None:
419
+ raise self._exception
420
+
421
+ while self._http_chunk_splits:
422
+ pos = self._http_chunk_splits.pop(0)
423
+ if pos == self._cursor:
424
+ return (b"", True)
425
+ if pos > self._cursor:
426
+ return (self._read_nowait(pos - self._cursor), True)
427
+ internal_logger.warning(
428
+ "Skipping HTTP chunk end due to data "
429
+ "consumption beyond chunk boundary"
430
+ )
431
+
432
+ if self._buffer:
433
+ return (self._read_nowait_chunk(-1), False)
434
+ # return (self._read_nowait(-1), False)
435
+
436
+ if self._eof:
437
+ # Special case for signifying EOF.
438
+ # (b'', True) is not a final return value actually.
439
+ return (b"", False)
440
+
441
+ await self._wait("readchunk")
442
+
443
+ async def readexactly(self, n: int) -> bytes:
444
+ if self._exception is not None:
445
+ raise self._exception
446
+
447
+ blocks: List[bytes] = []
448
+ while n > 0:
449
+ block = await self.read(n)
450
+ if not block:
451
+ partial = b"".join(blocks)
452
+ raise asyncio.IncompleteReadError(partial, len(partial) + n)
453
+ blocks.append(block)
454
+ n -= len(block)
455
+
456
+ return b"".join(blocks)
457
+
458
+ def read_nowait(self, n: int = -1) -> bytes:
459
+ # default was changed to be consistent with .read(-1)
460
+ #
461
+ # I believe the most users don't know about the method and
462
+ # they are not affected.
463
+ if self._exception is not None:
464
+ raise self._exception
465
+
466
+ if self._waiter and not self._waiter.done():
467
+ raise RuntimeError(
468
+ "Called while some coroutine is waiting for incoming data."
469
+ )
470
+
471
+ return self._read_nowait(n)
472
+
473
+ def _read_nowait_chunk(self, n: int) -> bytes:
474
+ first_buffer = self._buffer[0]
475
+ offset = self._buffer_offset
476
+ if n != -1 and len(first_buffer) - offset > n:
477
+ data = first_buffer[offset : offset + n]
478
+ self._buffer_offset += n
479
+
480
+ elif offset:
481
+ self._buffer.popleft()
482
+ data = first_buffer[offset:]
483
+ self._buffer_offset = 0
484
+
485
+ else:
486
+ data = self._buffer.popleft()
487
+
488
+ self._size -= len(data)
489
+ self._cursor += len(data)
490
+
491
+ chunk_splits = self._http_chunk_splits
492
+ # Prevent memory leak: drop useless chunk splits
493
+ while chunk_splits and chunk_splits[0] < self._cursor:
494
+ chunk_splits.pop(0)
495
+
496
+ if self._size < self._low_water and self._protocol._reading_paused:
497
+ self._protocol.resume_reading()
498
+ return data
499
+
500
+ def _read_nowait(self, n: int) -> bytes:
501
+ """Read not more than n bytes, or whole buffer if n == -1"""
502
+ self._timer.assert_timeout()
503
+
504
+ chunks = []
505
+ while self._buffer:
506
+ chunk = self._read_nowait_chunk(n)
507
+ chunks.append(chunk)
508
+ if n != -1:
509
+ n -= len(chunk)
510
+ if n == 0:
511
+ break
512
+
513
+ return b"".join(chunks) if chunks else b""
514
+
515
+
516
+ class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
517
+ def __init__(self) -> None:
518
+ self._read_eof_chunk = False
519
+
520
+ def __repr__(self) -> str:
521
+ return "<%s>" % self.__class__.__name__
522
+
523
+ def exception(self) -> Optional[BaseException]:
524
+ return None
525
+
526
+ def set_exception(
527
+ self,
528
+ exc: BaseException,
529
+ exc_cause: BaseException = _EXC_SENTINEL,
530
+ ) -> None:
531
+ pass
532
+
533
+ def on_eof(self, callback: Callable[[], None]) -> None:
534
+ try:
535
+ callback()
536
+ except Exception:
537
+ internal_logger.exception("Exception in eof callback")
538
+
539
+ def feed_eof(self) -> None:
540
+ pass
541
+
542
+ def is_eof(self) -> bool:
543
+ return True
544
+
545
+ def at_eof(self) -> bool:
546
+ return True
547
+
548
+ async def wait_eof(self) -> None:
549
+ return
550
+
551
+ def feed_data(self, data: bytes, n: int = 0) -> None:
552
+ pass
553
+
554
+ async def readline(self) -> bytes:
555
+ return b""
556
+
557
+ async def read(self, n: int = -1) -> bytes:
558
+ return b""
559
+
560
+ # TODO add async def readuntil
561
+
562
+ async def readany(self) -> bytes:
563
+ return b""
564
+
565
+ async def readchunk(self) -> Tuple[bytes, bool]:
566
+ if not self._read_eof_chunk:
567
+ self._read_eof_chunk = True
568
+ return (b"", False)
569
+
570
+ return (b"", True)
571
+
572
+ async def readexactly(self, n: int) -> bytes:
573
+ raise asyncio.IncompleteReadError(b"", n)
574
+
575
+ def read_nowait(self, n: int = -1) -> bytes:
576
+ return b""
577
+
578
+
579
+ EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
580
+
581
+
582
+ class DataQueue(Generic[_T]):
583
+ """DataQueue is a general-purpose blocking queue with one reader."""
584
+
585
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
586
+ self._loop = loop
587
+ self._eof = False
588
+ self._waiter: Optional[asyncio.Future[None]] = None
589
+ self._exception: Optional[BaseException] = None
590
+ self._size = 0
591
+ self._buffer: Deque[Tuple[_T, int]] = collections.deque()
592
+
593
+ def __len__(self) -> int:
594
+ return len(self._buffer)
595
+
596
+ def is_eof(self) -> bool:
597
+ return self._eof
598
+
599
+ def at_eof(self) -> bool:
600
+ return self._eof and not self._buffer
601
+
602
+ def exception(self) -> Optional[BaseException]:
603
+ return self._exception
604
+
605
+ def set_exception(
606
+ self,
607
+ exc: BaseException,
608
+ exc_cause: BaseException = _EXC_SENTINEL,
609
+ ) -> None:
610
+ self._eof = True
611
+ self._exception = exc
612
+
613
+ waiter = self._waiter
614
+ if waiter is not None:
615
+ self._waiter = None
616
+ set_exception(waiter, exc, exc_cause)
617
+
618
+ def feed_data(self, data: _T, size: int = 0) -> None:
619
+ self._size += size
620
+ self._buffer.append((data, size))
621
+
622
+ waiter = self._waiter
623
+ if waiter is not None:
624
+ self._waiter = None
625
+ set_result(waiter, None)
626
+
627
+ def feed_eof(self) -> None:
628
+ self._eof = True
629
+
630
+ waiter = self._waiter
631
+ if waiter is not None:
632
+ self._waiter = None
633
+ set_result(waiter, None)
634
+
635
+ async def read(self) -> _T:
636
+ if not self._buffer and not self._eof:
637
+ assert not self._waiter
638
+ self._waiter = self._loop.create_future()
639
+ try:
640
+ await self._waiter
641
+ except (asyncio.CancelledError, asyncio.TimeoutError):
642
+ self._waiter = None
643
+ raise
644
+
645
+ if self._buffer:
646
+ data, size = self._buffer.popleft()
647
+ self._size -= size
648
+ return data
649
+ else:
650
+ if self._exception is not None:
651
+ raise self._exception
652
+ else:
653
+ raise EofStream
654
+
655
+ def __aiter__(self) -> AsyncStreamIterator[_T]:
656
+ return AsyncStreamIterator(self.read)
657
+
658
+
659
+ class FlowControlDataQueue(DataQueue[_T]):
660
+ """FlowControlDataQueue resumes and pauses an underlying stream.
661
+
662
+ It is a destination for parsed data.
663
+ """
664
+
665
+ def __init__(
666
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
667
+ ) -> None:
668
+ super().__init__(loop=loop)
669
+
670
+ self._protocol = protocol
671
+ self._limit = limit * 2
672
+
673
+ def feed_data(self, data: _T, size: int = 0) -> None:
674
+ super().feed_data(data, size)
675
+
676
+ if self._size > self._limit and not self._protocol._reading_paused:
677
+ self._protocol.pause_reading()
678
+
679
+ async def read(self) -> _T:
680
+ try:
681
+ return await super().read()
682
+ finally:
683
+ if self._size < self._limit and self._protocol._reading_paused:
684
+ self._protocol.resume_reading()
env-llmeval/lib/python3.10/site-packages/aiohttp/tcp_helpers.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Helper methods to tune a TCP connection"""
2
+
3
+ import asyncio
4
+ import socket
5
+ from contextlib import suppress
6
+ from typing import Optional # noqa
7
+
8
+ __all__ = ("tcp_keepalive", "tcp_nodelay")
9
+
10
+
11
+ if hasattr(socket, "SO_KEEPALIVE"):
12
+
13
+ def tcp_keepalive(transport: asyncio.Transport) -> None:
14
+ sock = transport.get_extra_info("socket")
15
+ if sock is not None:
16
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
17
+
18
+ else:
19
+
20
+ def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
21
+ pass
22
+
23
+
24
+ def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
25
+ sock = transport.get_extra_info("socket")
26
+
27
+ if sock is None:
28
+ return
29
+
30
+ if sock.family not in (socket.AF_INET, socket.AF_INET6):
31
+ return
32
+
33
+ value = bool(value)
34
+
35
+ # socket may be closed already, on windows OSError get raised
36
+ with suppress(OSError):
37
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
env-llmeval/lib/python3.10/site-packages/aiohttp/test_utils.py ADDED
@@ -0,0 +1,682 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utilities shared by tests."""
2
+
3
+ import asyncio
4
+ import contextlib
5
+ import gc
6
+ import inspect
7
+ import ipaddress
8
+ import os
9
+ import socket
10
+ import sys
11
+ import warnings
12
+ from abc import ABC, abstractmethod
13
+ from types import TracebackType
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ Callable,
18
+ Iterator,
19
+ List,
20
+ Optional,
21
+ Type,
22
+ Union,
23
+ cast,
24
+ )
25
+ from unittest import IsolatedAsyncioTestCase, mock
26
+
27
+ from aiosignal import Signal
28
+ from multidict import CIMultiDict, CIMultiDictProxy
29
+ from yarl import URL
30
+
31
+ import aiohttp
32
+ from aiohttp.client import _RequestContextManager, _WSRequestContextManager
33
+
34
+ from . import ClientSession, hdrs
35
+ from .abc import AbstractCookieJar
36
+ from .client_reqrep import ClientResponse
37
+ from .client_ws import ClientWebSocketResponse
38
+ from .helpers import sentinel
39
+ from .http import HttpVersion, RawRequestMessage
40
+ from .typedefs import StrOrURL
41
+ from .web import (
42
+ Application,
43
+ AppRunner,
44
+ BaseRunner,
45
+ Request,
46
+ Server,
47
+ ServerRunner,
48
+ SockSite,
49
+ UrlMappingMatchInfo,
50
+ )
51
+ from .web_protocol import _RequestHandler
52
+
53
+ if TYPE_CHECKING:
54
+ from ssl import SSLContext
55
+ else:
56
+ SSLContext = None
57
+
58
+ REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
59
+
60
+
61
+ def get_unused_port_socket(
62
+ host: str, family: socket.AddressFamily = socket.AF_INET
63
+ ) -> socket.socket:
64
+ return get_port_socket(host, 0, family)
65
+
66
+
67
+ def get_port_socket(
68
+ host: str, port: int, family: socket.AddressFamily
69
+ ) -> socket.socket:
70
+ s = socket.socket(family, socket.SOCK_STREAM)
71
+ if REUSE_ADDRESS:
72
+ # Windows has different semantics for SO_REUSEADDR,
73
+ # so don't set it. Ref:
74
+ # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
75
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
76
+ s.bind((host, port))
77
+ return s
78
+
79
+
80
+ def unused_port() -> int:
81
+ """Return a port that is unused on the current host."""
82
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
83
+ s.bind(("127.0.0.1", 0))
84
+ return cast(int, s.getsockname()[1])
85
+
86
+
87
+ class BaseTestServer(ABC):
88
+ __test__ = False
89
+
90
+ def __init__(
91
+ self,
92
+ *,
93
+ scheme: Union[str, object] = sentinel,
94
+ loop: Optional[asyncio.AbstractEventLoop] = None,
95
+ host: str = "127.0.0.1",
96
+ port: Optional[int] = None,
97
+ skip_url_asserts: bool = False,
98
+ socket_factory: Callable[
99
+ [str, int, socket.AddressFamily], socket.socket
100
+ ] = get_port_socket,
101
+ **kwargs: Any,
102
+ ) -> None:
103
+ self._loop = loop
104
+ self.runner: Optional[BaseRunner] = None
105
+ self._root: Optional[URL] = None
106
+ self.host = host
107
+ self.port = port
108
+ self._closed = False
109
+ self.scheme = scheme
110
+ self.skip_url_asserts = skip_url_asserts
111
+ self.socket_factory = socket_factory
112
+
113
+ async def start_server(
114
+ self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
115
+ ) -> None:
116
+ if self.runner:
117
+ return
118
+ self._loop = loop
119
+ self._ssl = kwargs.pop("ssl", None)
120
+ self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
121
+ await self.runner.setup()
122
+ if not self.port:
123
+ self.port = 0
124
+ try:
125
+ version = ipaddress.ip_address(self.host).version
126
+ except ValueError:
127
+ version = 4
128
+ family = socket.AF_INET6 if version == 6 else socket.AF_INET
129
+ _sock = self.socket_factory(self.host, self.port, family)
130
+ self.host, self.port = _sock.getsockname()[:2]
131
+ site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
132
+ await site.start()
133
+ server = site._server
134
+ assert server is not None
135
+ sockets = server.sockets # type: ignore[attr-defined]
136
+ assert sockets is not None
137
+ self.port = sockets[0].getsockname()[1]
138
+ if self.scheme is sentinel:
139
+ if self._ssl:
140
+ scheme = "https"
141
+ else:
142
+ scheme = "http"
143
+ self.scheme = scheme
144
+ self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
145
+
146
+ @abstractmethod # pragma: no cover
147
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
148
+ pass
149
+
150
+ def make_url(self, path: StrOrURL) -> URL:
151
+ assert self._root is not None
152
+ url = URL(path)
153
+ if not self.skip_url_asserts:
154
+ assert not url.is_absolute()
155
+ return self._root.join(url)
156
+ else:
157
+ return URL(str(self._root) + str(path))
158
+
159
+ @property
160
+ def started(self) -> bool:
161
+ return self.runner is not None
162
+
163
+ @property
164
+ def closed(self) -> bool:
165
+ return self._closed
166
+
167
+ @property
168
+ def handler(self) -> Server:
169
+ # for backward compatibility
170
+ # web.Server instance
171
+ runner = self.runner
172
+ assert runner is not None
173
+ assert runner.server is not None
174
+ return runner.server
175
+
176
+ async def close(self) -> None:
177
+ """Close all fixtures created by the test client.
178
+
179
+ After that point, the TestClient is no longer usable.
180
+
181
+ This is an idempotent function: running close multiple times
182
+ will not have any additional effects.
183
+
184
+ close is also run when the object is garbage collected, and on
185
+ exit when used as a context manager.
186
+
187
+ """
188
+ if self.started and not self.closed:
189
+ assert self.runner is not None
190
+ await self.runner.cleanup()
191
+ self._root = None
192
+ self.port = None
193
+ self._closed = True
194
+
195
+ def __enter__(self) -> None:
196
+ raise TypeError("Use async with instead")
197
+
198
+ def __exit__(
199
+ self,
200
+ exc_type: Optional[Type[BaseException]],
201
+ exc_value: Optional[BaseException],
202
+ traceback: Optional[TracebackType],
203
+ ) -> None:
204
+ # __exit__ should exist in pair with __enter__ but never executed
205
+ pass # pragma: no cover
206
+
207
+ async def __aenter__(self) -> "BaseTestServer":
208
+ await self.start_server(loop=self._loop)
209
+ return self
210
+
211
+ async def __aexit__(
212
+ self,
213
+ exc_type: Optional[Type[BaseException]],
214
+ exc_value: Optional[BaseException],
215
+ traceback: Optional[TracebackType],
216
+ ) -> None:
217
+ await self.close()
218
+
219
+
220
+ class TestServer(BaseTestServer):
221
+ def __init__(
222
+ self,
223
+ app: Application,
224
+ *,
225
+ scheme: Union[str, object] = sentinel,
226
+ host: str = "127.0.0.1",
227
+ port: Optional[int] = None,
228
+ **kwargs: Any,
229
+ ):
230
+ self.app = app
231
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
232
+
233
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
234
+ return AppRunner(self.app, **kwargs)
235
+
236
+
237
+ class RawTestServer(BaseTestServer):
238
+ def __init__(
239
+ self,
240
+ handler: _RequestHandler,
241
+ *,
242
+ scheme: Union[str, object] = sentinel,
243
+ host: str = "127.0.0.1",
244
+ port: Optional[int] = None,
245
+ **kwargs: Any,
246
+ ) -> None:
247
+ self._handler = handler
248
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
249
+
250
+ async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
251
+ srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
252
+ return ServerRunner(srv, debug=debug, **kwargs)
253
+
254
+
255
+ class TestClient:
256
+ """
257
+ A test client implementation.
258
+
259
+ To write functional tests for aiohttp based servers.
260
+
261
+ """
262
+
263
+ __test__ = False
264
+
265
+ def __init__(
266
+ self,
267
+ server: BaseTestServer,
268
+ *,
269
+ cookie_jar: Optional[AbstractCookieJar] = None,
270
+ loop: Optional[asyncio.AbstractEventLoop] = None,
271
+ **kwargs: Any,
272
+ ) -> None:
273
+ if not isinstance(server, BaseTestServer):
274
+ raise TypeError(
275
+ "server must be TestServer " "instance, found type: %r" % type(server)
276
+ )
277
+ self._server = server
278
+ self._loop = loop
279
+ if cookie_jar is None:
280
+ cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
281
+ self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
282
+ self._closed = False
283
+ self._responses: List[ClientResponse] = []
284
+ self._websockets: List[ClientWebSocketResponse] = []
285
+
286
+ async def start_server(self) -> None:
287
+ await self._server.start_server(loop=self._loop)
288
+
289
+ @property
290
+ def host(self) -> str:
291
+ return self._server.host
292
+
293
+ @property
294
+ def port(self) -> Optional[int]:
295
+ return self._server.port
296
+
297
+ @property
298
+ def server(self) -> BaseTestServer:
299
+ return self._server
300
+
301
+ @property
302
+ def app(self) -> Optional[Application]:
303
+ return cast(Optional[Application], getattr(self._server, "app", None))
304
+
305
+ @property
306
+ def session(self) -> ClientSession:
307
+ """An internal aiohttp.ClientSession.
308
+
309
+ Unlike the methods on the TestClient, client session requests
310
+ do not automatically include the host in the url queried, and
311
+ will require an absolute path to the resource.
312
+
313
+ """
314
+ return self._session
315
+
316
+ def make_url(self, path: StrOrURL) -> URL:
317
+ return self._server.make_url(path)
318
+
319
+ async def _request(
320
+ self, method: str, path: StrOrURL, **kwargs: Any
321
+ ) -> ClientResponse:
322
+ resp = await self._session.request(method, self.make_url(path), **kwargs)
323
+ # save it to close later
324
+ self._responses.append(resp)
325
+ return resp
326
+
327
+ def request(
328
+ self, method: str, path: StrOrURL, **kwargs: Any
329
+ ) -> _RequestContextManager:
330
+ """Routes a request to tested http server.
331
+
332
+ The interface is identical to aiohttp.ClientSession.request,
333
+ except the loop kwarg is overridden by the instance used by the
334
+ test server.
335
+
336
+ """
337
+ return _RequestContextManager(self._request(method, path, **kwargs))
338
+
339
+ def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
340
+ """Perform an HTTP GET request."""
341
+ return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
342
+
343
+ def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
344
+ """Perform an HTTP POST request."""
345
+ return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
346
+
347
+ def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
348
+ """Perform an HTTP OPTIONS request."""
349
+ return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
350
+
351
+ def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
352
+ """Perform an HTTP HEAD request."""
353
+ return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
354
+
355
+ def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
356
+ """Perform an HTTP PUT request."""
357
+ return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
358
+
359
+ def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
360
+ """Perform an HTTP PATCH request."""
361
+ return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
362
+
363
+ def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
364
+ """Perform an HTTP PATCH request."""
365
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
366
+
367
+ def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
368
+ """Initiate websocket connection.
369
+
370
+ The api corresponds to aiohttp.ClientSession.ws_connect.
371
+
372
+ """
373
+ return _WSRequestContextManager(self._ws_connect(path, **kwargs))
374
+
375
+ async def _ws_connect(
376
+ self, path: StrOrURL, **kwargs: Any
377
+ ) -> ClientWebSocketResponse:
378
+ ws = await self._session.ws_connect(self.make_url(path), **kwargs)
379
+ self._websockets.append(ws)
380
+ return ws
381
+
382
+ async def close(self) -> None:
383
+ """Close all fixtures created by the test client.
384
+
385
+ After that point, the TestClient is no longer usable.
386
+
387
+ This is an idempotent function: running close multiple times
388
+ will not have any additional effects.
389
+
390
+ close is also run on exit when used as a(n) (asynchronous)
391
+ context manager.
392
+
393
+ """
394
+ if not self._closed:
395
+ for resp in self._responses:
396
+ resp.close()
397
+ for ws in self._websockets:
398
+ await ws.close()
399
+ await self._session.close()
400
+ await self._server.close()
401
+ self._closed = True
402
+
403
+ def __enter__(self) -> None:
404
+ raise TypeError("Use async with instead")
405
+
406
+ def __exit__(
407
+ self,
408
+ exc_type: Optional[Type[BaseException]],
409
+ exc: Optional[BaseException],
410
+ tb: Optional[TracebackType],
411
+ ) -> None:
412
+ # __exit__ should exist in pair with __enter__ but never executed
413
+ pass # pragma: no cover
414
+
415
+ async def __aenter__(self) -> "TestClient":
416
+ await self.start_server()
417
+ return self
418
+
419
+ async def __aexit__(
420
+ self,
421
+ exc_type: Optional[Type[BaseException]],
422
+ exc: Optional[BaseException],
423
+ tb: Optional[TracebackType],
424
+ ) -> None:
425
+ await self.close()
426
+
427
+
428
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """Base class for unittest-style tests of aiohttp web applications.

    Subclasses override :meth:`get_application`, after which the fixture
    provides:

    * ``self.client`` (aiohttp.test_utils.TestClient): a test client
      wired to the application.
    * ``self.loop`` (asyncio.BaseEventLoop): the event loop the
      application and server run on.
    * ``self.app`` (aiohttp.web.Application): the application returned
      by :meth:`get_application`.

    Every TestClient method is a coroutine and must be awaited from the
    (async) test methods.
    """

    async def get_application(self) -> Application:
        """Return the aiohttp.web.Application under test.

        Override this coroutine in subclasses; the default delegates to
        the obsolete synchronous :meth:`get_app` hook.
        """
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete synchronous hook kept for backward compatibility.

        Override .get_application() instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)

    async def asyncSetUp(self) -> None:
        # Capture the loop IsolatedAsyncioTestCase created for this test.
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        # Build app -> server -> client, then bring the server up.
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)
        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        # Closing the client also shuts down its server and responses.
        await self.client.close()
483
+
484
+
485
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """No-op decorator kept for backward compatibility.

    Since aiohttp 3.8 async test methods run natively, so this merely
    emits a DeprecationWarning and returns *func* unchanged.
    """
    deprecation_msg = (
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+"
    )
    warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
    return func
497
+
498
+
499
# Signature of a loop factory: a zero-argument callable returning a fresh
# asyncio event loop (e.g. ``asyncio.new_event_loop``).
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
500
+
501
+
502
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.

    :param loop_factory: zero-argument callable producing the loop.
    :param fast: forwarded to teardown_test_loop; skips the gc pass.
    """
    loop = setup_test_loop(loop_factory)
    try:
        yield loop
    finally:
        # Run teardown even when the ``with`` body raises, so a failing
        # test does not leak a still-installed, unclosed event loop.
        teardown_test_loop(loop, fast=fast)
513
+
514
+
515
def setup_test_loop(
    loop_factory: Callable[[], asyncio.AbstractEventLoop] = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create a fresh event loop, install it as the current one, return it.

    Every call must eventually be paired with teardown_test_loop() to
    stop, close and uninstall the loop.
    """
    new_loop = loop_factory()
    asyncio.set_event_loop(new_loop)
    return new_loop
526
+
527
+
528
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Stop, close and uninstall a loop created by setup_test_loop().

    :param fast: when True, skip the post-close gc.collect() pass.
    """
    if not loop.is_closed():
        # Schedule a stop, let pending callbacks drain, then close.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()
    if not fast:
        gc.collect()
    asyncio.set_event_loop(None)
540
+
541
+
542
def _create_app_mock() -> mock.MagicMock:
    """Build a MagicMock standing in for aiohttp.web.Application.

    Item access is backed by a real dict so ``app[key]`` round-trips,
    and on_response_prepare is a frozen Signal as on a prepared app.
    """

    def getitem(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def setitem(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = getitem
    app.__setitem__ = setitem

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
558
+
559
+
560
+ def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
561
+ transport = mock.Mock()
562
+
563
+ def get_extra_info(key: str) -> Optional[SSLContext]:
564
+ if key == "sslcontext":
565
+ return sslcontext
566
+ else:
567
+ return None
568
+
569
+ transport.get_extra_info.side_effect = get_extra_info
570
+ return transport
571
+
572
+
573
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Create a mocked web.Request for testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    Unsupplied keyword arguments (left at ``sentinel``) are replaced by
    mock objects wired together below; pass real objects to override any
    piece of the request machinery.
    """
    task = mock.Mock()
    if loop is ...:
        # no loop passed, try to get the current one if
        # its is running as we need a real loop to create
        # executor jobs to be able to do testing
        # with a real executor
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = mock.Mock()
            loop.create_future.return_value = ()

    # HTTP/1.0 and earlier have no keep-alive, so force closing.
    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        # Normalize to the immutable, case-insensitive mapping aiohttp
        # uses, and mirror it as utf-8 encoded raw header pairs.
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    # Chunked transfer is inferred from the Transfer-Encoding header.
    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        # Writer methods are awaited by Request/Response code, so each
        # gets a coroutine mock.
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    # Wire the (possibly caller-supplied) protocol to this transport and
    # writer unconditionally, so all pieces agree.
    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    # Attach routing match info (empty mapping unless provided) so
    # handlers relying on req.match_info / req.app work.
    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
668
+
669
+
670
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Create a coroutine mock.

    The returned mock records calls like a Mock while each call produces
    a coroutine that raises *raise_exception* (if given), otherwise
    awaits *return_value* when it is awaitable, or returns it directly.
    """

    async def coro_stub(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            # Awaitable results are awaited for their side effects only;
            # the awaited value itself is discarded.
            await return_value
            return None
        return return_value

    return mock.Mock(wraps=coro_stub)