applied-ai-018 commited on
Commit
1f59558
·
verified ·
1 Parent(s): 9f7bac3

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpts/universal/global_step40/zero/8.attention.dense.weight/fp32.pt +3 -0
  2. ckpts/universal/global_step40/zero/8.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt +3 -0
  3. venv/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash +1 -0
  4. venv/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash +1 -0
  5. venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash +1 -0
  6. venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash +1 -0
  7. venv/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash +1 -0
  8. venv/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash +1 -0
  9. venv/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash +1 -0
  10. venv/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash +1 -0
  11. venv/lib/python3.10/site-packages/aiohttp/__init__.py +240 -0
  12. venv/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc +0 -0
  16. venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
  21. venv/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc +0 -0
  22. venv/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc +0 -0
  23. venv/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc +0 -0
  24. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc +0 -0
  25. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc +0 -0
  27. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
  28. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc +0 -0
  29. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc +0 -0
  30. venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc +0 -0
  31. venv/lib/python3.10/site-packages/aiohttp/_cparser.pxd +158 -0
  32. venv/lib/python3.10/site-packages/aiohttp/_find_header.pxd +2 -0
  33. venv/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
  34. venv/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so +0 -0
  35. venv/lib/python3.10/site-packages/aiohttp/_helpers.pyx +35 -0
  36. venv/lib/python3.10/site-packages/aiohttp/_http_parser.pyx +838 -0
  37. venv/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so +0 -0
  38. venv/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +163 -0
  39. venv/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so +0 -0
  40. venv/lib/python3.10/site-packages/aiohttp/_websocket.pyx +56 -0
  41. venv/lib/python3.10/site-packages/aiohttp/abc.py +209 -0
  42. venv/lib/python3.10/site-packages/aiohttp/base_protocol.py +95 -0
  43. venv/lib/python3.10/site-packages/aiohttp/client.py +1366 -0
  44. venv/lib/python3.10/site-packages/aiohttp/client_exceptions.py +346 -0
  45. venv/lib/python3.10/site-packages/aiohttp/client_proto.py +296 -0
  46. venv/lib/python3.10/site-packages/aiohttp/client_reqrep.py +1207 -0
  47. venv/lib/python3.10/site-packages/aiohttp/client_ws.py +315 -0
  48. venv/lib/python3.10/site-packages/aiohttp/cookiejar.py +419 -0
  49. venv/lib/python3.10/site-packages/aiohttp/hdrs.py +108 -0
  50. venv/lib/python3.10/site-packages/aiohttp/helpers.py +1029 -0
ckpts/universal/global_step40/zero/8.attention.dense.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8e234f117637293e5e90d091cd10e2987effd0f7ec3ffa41fb381ebc198acb2a
3
+ size 16778317
ckpts/universal/global_step40/zero/8.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:716e79e0883e1fc2cf655d0cd4f5deec32fa899a1b0a012126f3d5aaab7679b1
3
+ size 33555627
venv/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
venv/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi
venv/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx
venv/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ abaf11ab4e8ca56f90c0bc6b884de120999620a73895515a587537725b077786 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
venv/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
venv/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx
venv/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ bb39f96a09ff8d789dda1fa4cba63464043c06b3de4c62c31abfb07a231cb6ca /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
venv/lib/python3.10/site-packages/aiohttp/__init__.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __version__ = "3.9.5"
2
+
3
+ from typing import TYPE_CHECKING, Tuple
4
+
5
+ from . import hdrs as hdrs
6
+ from .client import (
7
+ BaseConnector as BaseConnector,
8
+ ClientConnectionError as ClientConnectionError,
9
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
10
+ ClientConnectorError as ClientConnectorError,
11
+ ClientConnectorSSLError as ClientConnectorSSLError,
12
+ ClientError as ClientError,
13
+ ClientHttpProxyError as ClientHttpProxyError,
14
+ ClientOSError as ClientOSError,
15
+ ClientPayloadError as ClientPayloadError,
16
+ ClientProxyConnectionError as ClientProxyConnectionError,
17
+ ClientRequest as ClientRequest,
18
+ ClientResponse as ClientResponse,
19
+ ClientResponseError as ClientResponseError,
20
+ ClientSession as ClientSession,
21
+ ClientSSLError as ClientSSLError,
22
+ ClientTimeout as ClientTimeout,
23
+ ClientWebSocketResponse as ClientWebSocketResponse,
24
+ ContentTypeError as ContentTypeError,
25
+ Fingerprint as Fingerprint,
26
+ InvalidURL as InvalidURL,
27
+ NamedPipeConnector as NamedPipeConnector,
28
+ RequestInfo as RequestInfo,
29
+ ServerConnectionError as ServerConnectionError,
30
+ ServerDisconnectedError as ServerDisconnectedError,
31
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
32
+ ServerTimeoutError as ServerTimeoutError,
33
+ TCPConnector as TCPConnector,
34
+ TooManyRedirects as TooManyRedirects,
35
+ UnixConnector as UnixConnector,
36
+ WSServerHandshakeError as WSServerHandshakeError,
37
+ request as request,
38
+ )
39
+ from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
40
+ from .formdata import FormData as FormData
41
+ from .helpers import BasicAuth, ChainMapProxy, ETag
42
+ from .http import (
43
+ HttpVersion as HttpVersion,
44
+ HttpVersion10 as HttpVersion10,
45
+ HttpVersion11 as HttpVersion11,
46
+ WebSocketError as WebSocketError,
47
+ WSCloseCode as WSCloseCode,
48
+ WSMessage as WSMessage,
49
+ WSMsgType as WSMsgType,
50
+ )
51
+ from .multipart import (
52
+ BadContentDispositionHeader as BadContentDispositionHeader,
53
+ BadContentDispositionParam as BadContentDispositionParam,
54
+ BodyPartReader as BodyPartReader,
55
+ MultipartReader as MultipartReader,
56
+ MultipartWriter as MultipartWriter,
57
+ content_disposition_filename as content_disposition_filename,
58
+ parse_content_disposition as parse_content_disposition,
59
+ )
60
+ from .payload import (
61
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
62
+ AsyncIterablePayload as AsyncIterablePayload,
63
+ BufferedReaderPayload as BufferedReaderPayload,
64
+ BytesIOPayload as BytesIOPayload,
65
+ BytesPayload as BytesPayload,
66
+ IOBasePayload as IOBasePayload,
67
+ JsonPayload as JsonPayload,
68
+ Payload as Payload,
69
+ StringIOPayload as StringIOPayload,
70
+ StringPayload as StringPayload,
71
+ TextIOPayload as TextIOPayload,
72
+ get_payload as get_payload,
73
+ payload_type as payload_type,
74
+ )
75
+ from .payload_streamer import streamer as streamer
76
+ from .resolver import (
77
+ AsyncResolver as AsyncResolver,
78
+ DefaultResolver as DefaultResolver,
79
+ ThreadedResolver as ThreadedResolver,
80
+ )
81
+ from .streams import (
82
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
83
+ DataQueue as DataQueue,
84
+ EofStream as EofStream,
85
+ FlowControlDataQueue as FlowControlDataQueue,
86
+ StreamReader as StreamReader,
87
+ )
88
+ from .tracing import (
89
+ TraceConfig as TraceConfig,
90
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
91
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
92
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
93
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
94
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
95
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
96
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
97
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
98
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
99
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
100
+ TraceRequestEndParams as TraceRequestEndParams,
101
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
102
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
103
+ TraceRequestStartParams as TraceRequestStartParams,
104
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
105
+ )
106
+
107
+ if TYPE_CHECKING:
108
+ # At runtime these are lazy-loaded at the bottom of the file.
109
+ from .worker import (
110
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
111
+ GunicornWebWorker as GunicornWebWorker,
112
+ )
113
+
114
+ __all__: Tuple[str, ...] = (
115
+ "hdrs",
116
+ # client
117
+ "BaseConnector",
118
+ "ClientConnectionError",
119
+ "ClientConnectorCertificateError",
120
+ "ClientConnectorError",
121
+ "ClientConnectorSSLError",
122
+ "ClientError",
123
+ "ClientHttpProxyError",
124
+ "ClientOSError",
125
+ "ClientPayloadError",
126
+ "ClientProxyConnectionError",
127
+ "ClientResponse",
128
+ "ClientRequest",
129
+ "ClientResponseError",
130
+ "ClientSSLError",
131
+ "ClientSession",
132
+ "ClientTimeout",
133
+ "ClientWebSocketResponse",
134
+ "ContentTypeError",
135
+ "Fingerprint",
136
+ "InvalidURL",
137
+ "RequestInfo",
138
+ "ServerConnectionError",
139
+ "ServerDisconnectedError",
140
+ "ServerFingerprintMismatch",
141
+ "ServerTimeoutError",
142
+ "TCPConnector",
143
+ "TooManyRedirects",
144
+ "UnixConnector",
145
+ "NamedPipeConnector",
146
+ "WSServerHandshakeError",
147
+ "request",
148
+ # cookiejar
149
+ "CookieJar",
150
+ "DummyCookieJar",
151
+ # formdata
152
+ "FormData",
153
+ # helpers
154
+ "BasicAuth",
155
+ "ChainMapProxy",
156
+ "ETag",
157
+ # http
158
+ "HttpVersion",
159
+ "HttpVersion10",
160
+ "HttpVersion11",
161
+ "WSMsgType",
162
+ "WSCloseCode",
163
+ "WSMessage",
164
+ "WebSocketError",
165
+ # multipart
166
+ "BadContentDispositionHeader",
167
+ "BadContentDispositionParam",
168
+ "BodyPartReader",
169
+ "MultipartReader",
170
+ "MultipartWriter",
171
+ "content_disposition_filename",
172
+ "parse_content_disposition",
173
+ # payload
174
+ "AsyncIterablePayload",
175
+ "BufferedReaderPayload",
176
+ "BytesIOPayload",
177
+ "BytesPayload",
178
+ "IOBasePayload",
179
+ "JsonPayload",
180
+ "PAYLOAD_REGISTRY",
181
+ "Payload",
182
+ "StringIOPayload",
183
+ "StringPayload",
184
+ "TextIOPayload",
185
+ "get_payload",
186
+ "payload_type",
187
+ # payload_streamer
188
+ "streamer",
189
+ # resolver
190
+ "AsyncResolver",
191
+ "DefaultResolver",
192
+ "ThreadedResolver",
193
+ # streams
194
+ "DataQueue",
195
+ "EMPTY_PAYLOAD",
196
+ "EofStream",
197
+ "FlowControlDataQueue",
198
+ "StreamReader",
199
+ # tracing
200
+ "TraceConfig",
201
+ "TraceConnectionCreateEndParams",
202
+ "TraceConnectionCreateStartParams",
203
+ "TraceConnectionQueuedEndParams",
204
+ "TraceConnectionQueuedStartParams",
205
+ "TraceConnectionReuseconnParams",
206
+ "TraceDnsCacheHitParams",
207
+ "TraceDnsCacheMissParams",
208
+ "TraceDnsResolveHostEndParams",
209
+ "TraceDnsResolveHostStartParams",
210
+ "TraceRequestChunkSentParams",
211
+ "TraceRequestEndParams",
212
+ "TraceRequestExceptionParams",
213
+ "TraceRequestRedirectParams",
214
+ "TraceRequestStartParams",
215
+ "TraceResponseChunkReceivedParams",
216
+ # workers (imported lazily with __getattr__)
217
+ "GunicornUVLoopWebWorker",
218
+ "GunicornWebWorker",
219
+ )
220
+
221
+
222
+ def __dir__() -> Tuple[str, ...]:
223
+ return __all__ + ("__author__", "__doc__")
224
+
225
+
226
+ def __getattr__(name: str) -> object:
227
+ global GunicornUVLoopWebWorker, GunicornWebWorker
228
+
229
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
230
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
231
+ try:
232
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
233
+ except ImportError:
234
+ return None
235
+
236
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
237
+ GunicornWebWorker = gw # type: ignore[misc]
238
+ return guv if name == "GunicornUVLoopWebWorker" else gw
239
+
240
+ raise AttributeError(f"module {__name__} has no attribute {name}")
venv/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.16 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc ADDED
Binary file (11.2 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc ADDED
Binary file (5.22 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc ADDED
Binary file (4.84 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc ADDED
Binary file (5.67 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc ADDED
Binary file (1.65 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc ADDED
Binary file (27.4 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc ADDED
Binary file (4.03 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc ADDED
Binary file (18.9 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc ADDED
Binary file (1.14 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc ADDED
Binary file (21 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc ADDED
Binary file (1.38 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc ADDED
Binary file (11.4 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc ADDED
Binary file (16.5 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc ADDED
Binary file (7.37 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc ADDED
Binary file (3.86 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc ADDED
Binary file (24.4 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc ADDED
Binary file (12.1 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc ADDED
Binary file (3.33 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/_cparser.pxd ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
2
+
3
+
4
+ cdef extern from "../vendor/llhttp/build/llhttp.h":
5
+
6
+ struct llhttp__internal_s:
7
+ int32_t _index
8
+ void* _span_pos0
9
+ void* _span_cb0
10
+ int32_t error
11
+ const char* reason
12
+ const char* error_pos
13
+ void* data
14
+ void* _current
15
+ uint64_t content_length
16
+ uint8_t type
17
+ uint8_t method
18
+ uint8_t http_major
19
+ uint8_t http_minor
20
+ uint8_t header_state
21
+ uint8_t lenient_flags
22
+ uint8_t upgrade
23
+ uint8_t finish
24
+ uint16_t flags
25
+ uint16_t status_code
26
+ void* settings
27
+
28
+ ctypedef llhttp__internal_s llhttp__internal_t
29
+ ctypedef llhttp__internal_t llhttp_t
30
+
31
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
32
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
33
+
34
+ struct llhttp_settings_s:
35
+ llhttp_cb on_message_begin
36
+ llhttp_data_cb on_url
37
+ llhttp_data_cb on_status
38
+ llhttp_data_cb on_header_field
39
+ llhttp_data_cb on_header_value
40
+ llhttp_cb on_headers_complete
41
+ llhttp_data_cb on_body
42
+ llhttp_cb on_message_complete
43
+ llhttp_cb on_chunk_header
44
+ llhttp_cb on_chunk_complete
45
+
46
+ llhttp_cb on_url_complete
47
+ llhttp_cb on_status_complete
48
+ llhttp_cb on_header_field_complete
49
+ llhttp_cb on_header_value_complete
50
+
51
+ ctypedef llhttp_settings_s llhttp_settings_t
52
+
53
+ enum llhttp_errno:
54
+ HPE_OK,
55
+ HPE_INTERNAL,
56
+ HPE_STRICT,
57
+ HPE_LF_EXPECTED,
58
+ HPE_UNEXPECTED_CONTENT_LENGTH,
59
+ HPE_CLOSED_CONNECTION,
60
+ HPE_INVALID_METHOD,
61
+ HPE_INVALID_URL,
62
+ HPE_INVALID_CONSTANT,
63
+ HPE_INVALID_VERSION,
64
+ HPE_INVALID_HEADER_TOKEN,
65
+ HPE_INVALID_CONTENT_LENGTH,
66
+ HPE_INVALID_CHUNK_SIZE,
67
+ HPE_INVALID_STATUS,
68
+ HPE_INVALID_EOF_STATE,
69
+ HPE_INVALID_TRANSFER_ENCODING,
70
+ HPE_CB_MESSAGE_BEGIN,
71
+ HPE_CB_HEADERS_COMPLETE,
72
+ HPE_CB_MESSAGE_COMPLETE,
73
+ HPE_CB_CHUNK_HEADER,
74
+ HPE_CB_CHUNK_COMPLETE,
75
+ HPE_PAUSED,
76
+ HPE_PAUSED_UPGRADE,
77
+ HPE_USER
78
+
79
+ ctypedef llhttp_errno llhttp_errno_t
80
+
81
+ enum llhttp_flags:
82
+ F_CHUNKED,
83
+ F_CONTENT_LENGTH
84
+
85
+ enum llhttp_type:
86
+ HTTP_REQUEST,
87
+ HTTP_RESPONSE,
88
+ HTTP_BOTH
89
+
90
+ enum llhttp_method:
91
+ HTTP_DELETE,
92
+ HTTP_GET,
93
+ HTTP_HEAD,
94
+ HTTP_POST,
95
+ HTTP_PUT,
96
+ HTTP_CONNECT,
97
+ HTTP_OPTIONS,
98
+ HTTP_TRACE,
99
+ HTTP_COPY,
100
+ HTTP_LOCK,
101
+ HTTP_MKCOL,
102
+ HTTP_MOVE,
103
+ HTTP_PROPFIND,
104
+ HTTP_PROPPATCH,
105
+ HTTP_SEARCH,
106
+ HTTP_UNLOCK,
107
+ HTTP_BIND,
108
+ HTTP_REBIND,
109
+ HTTP_UNBIND,
110
+ HTTP_ACL,
111
+ HTTP_REPORT,
112
+ HTTP_MKACTIVITY,
113
+ HTTP_CHECKOUT,
114
+ HTTP_MERGE,
115
+ HTTP_MSEARCH,
116
+ HTTP_NOTIFY,
117
+ HTTP_SUBSCRIBE,
118
+ HTTP_UNSUBSCRIBE,
119
+ HTTP_PATCH,
120
+ HTTP_PURGE,
121
+ HTTP_MKCALENDAR,
122
+ HTTP_LINK,
123
+ HTTP_UNLINK,
124
+ HTTP_SOURCE,
125
+ HTTP_PRI,
126
+ HTTP_DESCRIBE,
127
+ HTTP_ANNOUNCE,
128
+ HTTP_SETUP,
129
+ HTTP_PLAY,
130
+ HTTP_PAUSE,
131
+ HTTP_TEARDOWN,
132
+ HTTP_GET_PARAMETER,
133
+ HTTP_SET_PARAMETER,
134
+ HTTP_REDIRECT,
135
+ HTTP_RECORD,
136
+ HTTP_FLUSH
137
+
138
+ ctypedef llhttp_method llhttp_method_t;
139
+
140
+ void llhttp_settings_init(llhttp_settings_t* settings)
141
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
142
+ const llhttp_settings_t* settings)
143
+
144
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
145
+
146
+ int llhttp_should_keep_alive(const llhttp_t* parser)
147
+
148
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
149
+
150
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
151
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
152
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
153
+
154
+ const char* llhttp_method_name(llhttp_method_t method)
155
+
156
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
157
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
158
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
venv/lib/python3.10/site-packages/aiohttp/_find_header.pxd ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ cdef extern from "_find_header.h":
2
+ int find_header(char *, int)
venv/lib/python3.10/site-packages/aiohttp/_headers.pxi ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The file is autogenerated from aiohttp/hdrs.py
2
+ # Run ./tools/gen.py to update it after the origin changing.
3
+
4
+ from . import hdrs
5
+ cdef tuple headers = (
6
+ hdrs.ACCEPT,
7
+ hdrs.ACCEPT_CHARSET,
8
+ hdrs.ACCEPT_ENCODING,
9
+ hdrs.ACCEPT_LANGUAGE,
10
+ hdrs.ACCEPT_RANGES,
11
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
12
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
13
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
14
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
15
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
16
+ hdrs.ACCESS_CONTROL_MAX_AGE,
17
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
18
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
19
+ hdrs.AGE,
20
+ hdrs.ALLOW,
21
+ hdrs.AUTHORIZATION,
22
+ hdrs.CACHE_CONTROL,
23
+ hdrs.CONNECTION,
24
+ hdrs.CONTENT_DISPOSITION,
25
+ hdrs.CONTENT_ENCODING,
26
+ hdrs.CONTENT_LANGUAGE,
27
+ hdrs.CONTENT_LENGTH,
28
+ hdrs.CONTENT_LOCATION,
29
+ hdrs.CONTENT_MD5,
30
+ hdrs.CONTENT_RANGE,
31
+ hdrs.CONTENT_TRANSFER_ENCODING,
32
+ hdrs.CONTENT_TYPE,
33
+ hdrs.COOKIE,
34
+ hdrs.DATE,
35
+ hdrs.DESTINATION,
36
+ hdrs.DIGEST,
37
+ hdrs.ETAG,
38
+ hdrs.EXPECT,
39
+ hdrs.EXPIRES,
40
+ hdrs.FORWARDED,
41
+ hdrs.FROM,
42
+ hdrs.HOST,
43
+ hdrs.IF_MATCH,
44
+ hdrs.IF_MODIFIED_SINCE,
45
+ hdrs.IF_NONE_MATCH,
46
+ hdrs.IF_RANGE,
47
+ hdrs.IF_UNMODIFIED_SINCE,
48
+ hdrs.KEEP_ALIVE,
49
+ hdrs.LAST_EVENT_ID,
50
+ hdrs.LAST_MODIFIED,
51
+ hdrs.LINK,
52
+ hdrs.LOCATION,
53
+ hdrs.MAX_FORWARDS,
54
+ hdrs.ORIGIN,
55
+ hdrs.PRAGMA,
56
+ hdrs.PROXY_AUTHENTICATE,
57
+ hdrs.PROXY_AUTHORIZATION,
58
+ hdrs.RANGE,
59
+ hdrs.REFERER,
60
+ hdrs.RETRY_AFTER,
61
+ hdrs.SEC_WEBSOCKET_ACCEPT,
62
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
63
+ hdrs.SEC_WEBSOCKET_KEY,
64
+ hdrs.SEC_WEBSOCKET_KEY1,
65
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
66
+ hdrs.SEC_WEBSOCKET_VERSION,
67
+ hdrs.SERVER,
68
+ hdrs.SET_COOKIE,
69
+ hdrs.TE,
70
+ hdrs.TRAILER,
71
+ hdrs.TRANSFER_ENCODING,
72
+ hdrs.URI,
73
+ hdrs.UPGRADE,
74
+ hdrs.USER_AGENT,
75
+ hdrs.VARY,
76
+ hdrs.VIA,
77
+ hdrs.WWW_AUTHENTICATE,
78
+ hdrs.WANT_DIGEST,
79
+ hdrs.WARNING,
80
+ hdrs.X_FORWARDED_FOR,
81
+ hdrs.X_FORWARDED_HOST,
82
+ hdrs.X_FORWARDED_PROTO,
83
+ )
venv/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (509 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/_helpers.pyx ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cdef class reify:
2
+ """Use as a class method decorator. It operates almost exactly like
3
+ the Python `@property` decorator, but it puts the result of the
4
+ method it decorates into the instance dict after the first call,
5
+ effectively replacing the function it decorates with an instance
6
+ variable. It is, in Python parlance, a data descriptor.
7
+
8
+ """
9
+
10
+ cdef object wrapped
11
+ cdef object name
12
+
13
+ def __init__(self, wrapped):
14
+ self.wrapped = wrapped
15
+ self.name = wrapped.__name__
16
+
17
+ @property
18
+ def __doc__(self):
19
+ return self.wrapped.__doc__
20
+
21
+ def __get__(self, inst, owner):
22
+ try:
23
+ try:
24
+ return inst._cache[self.name]
25
+ except KeyError:
26
+ val = self.wrapped(inst)
27
+ inst._cache[self.name] = val
28
+ return val
29
+ except AttributeError:
30
+ if inst is None:
31
+ return self
32
+ raise
33
+
34
+ def __set__(self, inst, value):
35
+ raise AttributeError("reified property is read-only")
venv/lib/python3.10/site-packages/aiohttp/_http_parser.pyx ADDED
@@ -0,0 +1,838 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #cython: language_level=3
2
+ #
3
+ # Based on https://github.com/MagicStack/httptools
4
+ #
5
+
6
+ from cpython cimport (
7
+ Py_buffer,
8
+ PyBUF_SIMPLE,
9
+ PyBuffer_Release,
10
+ PyBytes_AsString,
11
+ PyBytes_AsStringAndSize,
12
+ PyObject_GetBuffer,
13
+ )
14
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc
15
+ from libc.limits cimport ULLONG_MAX
16
+ from libc.string cimport memcpy
17
+
18
+ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
19
+ from yarl import URL as _URL
20
+
21
+ from aiohttp import hdrs
22
+ from aiohttp.helpers import DEBUG, set_exception
23
+
24
+ from .http_exceptions import (
25
+ BadHttpMessage,
26
+ BadStatusLine,
27
+ ContentLengthError,
28
+ InvalidHeader,
29
+ InvalidURLError,
30
+ LineTooLong,
31
+ PayloadEncodingError,
32
+ TransferEncodingError,
33
+ )
34
+ from .http_parser import DeflateBuffer as _DeflateBuffer
35
+ from .http_writer import (
36
+ HttpVersion as _HttpVersion,
37
+ HttpVersion10 as _HttpVersion10,
38
+ HttpVersion11 as _HttpVersion11,
39
+ )
40
+ from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
41
+
42
+ cimport cython
43
+
44
+ from aiohttp cimport _cparser as cparser
45
+
46
+ include "_headers.pxi"
47
+
48
+ from aiohttp cimport _find_header
49
+
50
+ DEF DEFAULT_FREELIST_SIZE = 250
51
+
52
+ cdef extern from "Python.h":
53
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
54
+ Py_ssize_t PyByteArray_Size(object) except -1
55
+ char* PyByteArray_AsString(object)
56
+
57
+ __all__ = ('HttpRequestParser', 'HttpResponseParser',
58
+ 'RawRequestMessage', 'RawResponseMessage')
59
+
60
+ cdef object URL = _URL
61
+ cdef object URL_build = URL.build
62
+ cdef object CIMultiDict = _CIMultiDict
63
+ cdef object CIMultiDictProxy = _CIMultiDictProxy
64
+ cdef object HttpVersion = _HttpVersion
65
+ cdef object HttpVersion10 = _HttpVersion10
66
+ cdef object HttpVersion11 = _HttpVersion11
67
+ cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
68
+ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
69
+ cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
70
+ cdef object StreamReader = _StreamReader
71
+ cdef object DeflateBuffer = _DeflateBuffer
72
+
73
+
74
+ cdef inline object extend(object buf, const char* at, size_t length):
75
+ cdef Py_ssize_t s
76
+ cdef char* ptr
77
+ s = PyByteArray_Size(buf)
78
+ PyByteArray_Resize(buf, s + length)
79
+ ptr = PyByteArray_AsString(buf)
80
+ memcpy(ptr + s, at, length)
81
+
82
+
83
+ DEF METHODS_COUNT = 46;
84
+
85
+ cdef list _http_method = []
86
+
87
+ for i in range(METHODS_COUNT):
88
+ _http_method.append(
89
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
90
+
91
+
92
+ cdef inline str http_method_str(int i):
93
+ if i < METHODS_COUNT:
94
+ return <str>_http_method[i]
95
+ else:
96
+ return "<unknown>"
97
+
98
+ cdef inline object find_header(bytes raw_header):
99
+ cdef Py_ssize_t size
100
+ cdef char *buf
101
+ cdef int idx
102
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
103
+ idx = _find_header.find_header(buf, size)
104
+ if idx == -1:
105
+ return raw_header.decode('utf-8', 'surrogateescape')
106
+ return headers[idx]
107
+
108
+
109
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    """Parsed HTTP request head (method/path/version/headers plus flags)."""

    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        fields = (
            ("method", self.method),
            ("path", self.path),
            ("version", self.version),
            ("headers", self.headers),
            ("raw_headers", self.raw_headers),
            ("should_close", self.should_close),
            ("compression", self.compression),
            ("upgrade", self.upgrade),
            ("chunked", self.chunked),
            ("url", self.url),
        )
        body = ', '.join(name + '=' + repr(val) for name, val in fields)
        return '<RawRequestMessage(' + body + ')>'

    def _replace(self, **overrides):
        """Return a copy of this message with the given fields overridden."""
        cdef RawRequestMessage clone
        clone = _new_request_message(self.method,
                                     self.path,
                                     self.version,
                                     self.headers,
                                     self.raw_headers,
                                     self.should_close,
                                     self.compression,
                                     self.upgrade,
                                     self.chunked,
                                     self.url)
        # Attributes are `readonly` for Python callers, so each override is
        # applied explicitly from Cython code rather than via setattr().
        if "method" in overrides:
            clone.method = overrides["method"]
        if "path" in overrides:
            clone.path = overrides["path"]
        if "version" in overrides:
            clone.version = overrides["version"]
        if "headers" in overrides:
            clone.headers = overrides["headers"]
        if "raw_headers" in overrides:
            clone.raw_headers = overrides["raw_headers"]
        if "should_close" in overrides:
            clone.should_close = overrides["should_close"]
        if "compression" in overrides:
            clone.compression = overrides["compression"]
        if "upgrade" in overrides:
            clone.upgrade = overrides["upgrade"]
        if "chunked" in overrides:
            clone.chunked = overrides["chunked"]
        if "url" in overrides:
            clone.url = overrides["url"]
        return clone
183
+
184
cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    """Fast constructor for RawRequestMessage that skips __init__."""
    cdef RawRequestMessage msg
    msg = RawRequestMessage.__new__(RawRequestMessage)
    msg.method = method
    msg.path = path
    msg.version = version
    msg.headers = headers
    msg.raw_headers = raw_headers
    msg.should_close = should_close
    msg.compression = compression
    msg.upgrade = upgrade
    msg.chunked = chunked
    msg.url = url
    return msg
207
+
208
+
209
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    """Parsed HTTP response head (version/code/reason/headers plus flags)."""

    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        fields = (
            ("version", self.version),
            ("code", self.code),
            ("reason", self.reason),
            ("headers", self.headers),
            ("raw_headers", self.raw_headers),
            ("should_close", self.should_close),
            ("compression", self.compression),
            ("upgrade", self.upgrade),
            ("chunked", self.chunked),
        )
        body = ', '.join(name + '=' + repr(val) for name, val in fields)
        return '<RawResponseMessage(' + body + ')>'
246
+
247
+
248
cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    """Fast constructor for RawResponseMessage that skips __init__."""
    cdef RawResponseMessage msg
    msg = RawResponseMessage.__new__(RawResponseMessage)
    msg.version = version
    msg.code = code
    msg.reason = reason
    msg.headers = headers
    msg.raw_headers = raw_headers
    msg.should_close = should_close
    msg.compression = compression
    msg.upgrade = upgrade
    msg.chunked = chunked
    return msg
269
+
270
+
271
@cython.internal
cdef class HttpParser:
    """Base wrapper around a heap-allocated C llhttp parser.

    Subclasses choose the llhttp mode (request vs. response) and implement
    ``_on_status_complete``.  llhttp invokes the module-level ``cb_*``
    trampolines, which dispatch into the ``_on_*`` methods here.
    """

    cdef:
        cparser.llhttp_t* _cparser
        cparser.llhttp_settings_t* _csettings

        # Current header name/value being accumulated across callbacks.
        bytearray _raw_name
        bytearray _raw_value
        bint _has_value

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf
        str _path
        str _reason
        object _headers
        list _raw_headers
        bint _upgraded
        list _messages
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error
        bint _auto_decompress
        int _limit

        str _content_encoding

        Py_buffer py_buf

    def __cinit__(self):
        self._cparser = <cparser.llhttp_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        """Configure the C parser and reset all per-connection state."""
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        # Store a back-reference so the C callbacks can reach this object.
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        """Flush the accumulated raw name/value pair into the header maps."""
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            # Identity check works because find_header returns the interned
            # well-known header object for known names.
            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        cdef Py_ssize_t old_len
        cdef char *dst
        # A new field after a value means the previous pair is complete.
        if self._has_value:
            self._process_header()

        old_len = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, old_len + length)
        dst = PyByteArray_AsString(self._raw_name)
        memcpy(dst + old_len, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        cdef Py_ssize_t old_len
        cdef char *dst

        old_len = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, old_len + length)
        dst = PyByteArray_AsString(self._raw_value)
        memcpy(dst + old_len, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        """Build the message object and select its payload reader."""
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            # Body bytes still flow into the wrapped stream; the wrapper
            # transparently decompresses them.
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            # HEAD-style response: hand the caller an empty payload but keep
            # feeding the real one internally so parser state stays correct.
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        # Overridden by request/response subclasses.
        pass

    cdef inline http_version(self):
        cdef cparser.llhttp_t* parser = self._cparser

        # Fast path for the two common versions (shared singletons).
        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        """Signal end-of-data; raise if the message is incomplete."""
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        """Run llhttp over *data*; return (messages, upgraded, tail)."""
        cdef:
            size_t data_len
            size_t nb
            cparser.llhttp_errno_t rc

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        rc = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if rc is cparser.HPE_PAUSED_UPGRADE:
            cparser.llhttp_resume_after_upgrade(self._cparser)

            nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if rc not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    ex = self._last_error
                    self._last_error = None
                else:
                    # Build a "before ^ after" snippet around the error
                    # position for the exception message.
                    # NOTE(review): py_buf.buf is read after
                    # PyBuffer_Release above; the pointer value is stale but
                    # still equals the original buffer address — confirm
                    # against upstream before touching this.
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val
572
+
573
+
574
cdef class HttpRequestParser(HttpParser):
    """HTTP request parser; splits the request target into a yarl URL."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]

                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                self._url = URL(self._path, encoded=True)
        finally:
            # Reuse the accumulation buffer for the next message.
            PyByteArray_Resize(self._buf, 0)
636
+
637
+
638
cdef class HttpResponseParser(HttpParser):
    """HTTP response parser; captures the reason phrase from the status line."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True
    ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)
        # Use strict parsing on dev mode, so users are warned about broken servers.
        if not DEBUG:
            cparser.llhttp_set_lenient_headers(self._cparser, 1)
            cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
            cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)

    cdef object _on_status_complete(self):
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''
663
+
664
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    """llhttp trampoline: reset per-message state on the owning parser."""
    cdef HttpParser owner = <HttpParser>parser.data

    owner._started = True
    owner._headers = CIMultiDict()
    owner._raw_headers = []
    PyByteArray_Resize(owner._buf, 0)
    owner._path = None
    owner._reason = None
    return 0
674
+
675
+
676
cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    """llhttp trampoline: accumulate the request target into _buf."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        if length > owner._max_line_size:
            raise LineTooLong(
                'Status line is too long', owner._max_line_size, length)
        extend(owner._buf, at, length)
    except BaseException as ex:
        # Stash the Python exception; llhttp only understands int codes.
        owner._last_error = ex
        return -1
    else:
        return 0
689
+
690
+
691
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp trampoline: accumulate the status reason phrase into _buf.

    Mirrors cb_on_url; the Python exception is stashed on the parser
    because llhttp callbacks can only signal failure via the return code.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    # Removed unused local `cdef str reason` — it was never assigned or read.
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
705
+
706
+
707
cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp trampoline: append bytes of the current header name."""
    cdef HttpParser owner = <HttpParser>parser.data
    cdef Py_ssize_t total
    try:
        # The status/URL line is complete once header parsing starts.
        owner._on_status_complete()
        total = len(owner._raw_name) + length
        if total > owner._max_field_size:
            raise LineTooLong(
                'Header name is too long', owner._max_field_size, total)
        owner._on_header_field(at, length)
    except BaseException as ex:
        owner._last_error = ex
        return -1
    else:
        return 0
723
+
724
+
725
cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp trampoline: append bytes of the current header value."""
    cdef HttpParser owner = <HttpParser>parser.data
    cdef Py_ssize_t total
    try:
        total = len(owner._raw_value) + length
        if total > owner._max_field_size:
            raise LineTooLong(
                'Header value is too long', owner._max_field_size, total)
        owner._on_header_value(at, length)
    except BaseException as ex:
        owner._last_error = ex
        return -1
    else:
        return 0
740
+
741
+
742
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    """llhttp trampoline: finalize the head; 2 tells llhttp to skip the body."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_status_complete()
        owner._on_headers_complete()
    except BaseException as exc:
        owner._last_error = exc
        return -1
    else:
        # Returning 2 makes llhttp treat the rest as opaque (upgrade/CONNECT).
        if (
            owner._cparser.upgrade or
            owner._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0
758
+
759
+
760
cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    """llhttp trampoline: feed body bytes into the current payload stream."""
    cdef HttpParser owner = <HttpParser>parser.data
    cdef bytes chunk = at[:length]
    try:
        owner._payload.feed_data(chunk, length)
    except BaseException as underlying_exc:
        reraised_exc = underlying_exc
        # Wrap in the user-supplied payload exception type when configured.
        if owner._payload_exception is not None:
            reraised_exc = owner._payload_exception(str(underlying_exc))

        set_exception(owner._payload, reraised_exc, underlying_exc)

        owner._payload_error = 1
        return -1
    else:
        return 0
777
+
778
+
779
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    """llhttp trampoline: mark the message finished and EOF its payload."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._started = False
        owner._on_message_complete()
    except BaseException as exc:
        owner._last_error = exc
        return -1
    else:
        return 0
789
+
790
+
791
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    """llhttp trampoline: a new chunked-encoding chunk starts."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_header()
    except BaseException as exc:
        owner._last_error = exc
        return -1
    else:
        return 0
800
+
801
+
802
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    """llhttp trampoline: the current chunked-encoding chunk is done."""
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_complete()
    except BaseException as exc:
        owner._last_error = exc
        return -1
    else:
        return 0
811
+
812
+
813
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    """Translate an llhttp errno into the matching aiohttp exception.

    *data*/*pointer* are the pre-formatted context snippet and caret line
    produced by HttpParser.feed_data.
    """
    cdef cparser.llhttp_errno_t code = cparser.llhttp_get_errno(parser)
    cdef bytes reason = cparser.llhttp_get_error_reason(parser)

    msg = "{}:\n\n  {!r}\n  {}".format(reason.decode("latin-1"), data, pointer)

    if code in {cparser.HPE_CB_MESSAGE_BEGIN,
                cparser.HPE_CB_HEADERS_COMPLETE,
                cparser.HPE_CB_MESSAGE_COMPLETE,
                cparser.HPE_CB_CHUNK_HEADER,
                cparser.HPE_CB_CHUNK_COMPLETE,
                cparser.HPE_INVALID_CONSTANT,
                cparser.HPE_INVALID_HEADER_TOKEN,
                cparser.HPE_INVALID_CONTENT_LENGTH,
                cparser.HPE_INVALID_CHUNK_SIZE,
                cparser.HPE_INVALID_EOF_STATE,
                cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(msg)
    elif code in {cparser.HPE_INVALID_STATUS,
                  cparser.HPE_INVALID_METHOD,
                  cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=msg)
    elif code == cparser.HPE_INVALID_URL:
        return InvalidURLError(msg)

    # Anything unrecognized degrades to the generic message error.
    return BadHttpMessage(msg)
venv/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (459 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/_http_writer.pyx ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython.bytes cimport PyBytes_FromStringAndSize
2
+ from cpython.exc cimport PyErr_NoMemory
3
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
4
+ from cpython.object cimport PyObject_Str
5
+ from libc.stdint cimport uint8_t, uint64_t
6
+ from libc.string cimport memcpy
7
+
8
+ from multidict import istr
9
+
10
+ DEF BUF_SIZE = 16 * 1024 # 16KiB
11
+ cdef char BUFFER[BUF_SIZE]
12
+
13
+ cdef object _istr = istr
14
+
15
+
16
+ # ----------------- writer ---------------------------
17
+
18
cdef struct Writer:
    # Grow-on-demand byte sink used by _serialize_headers.
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* w):
    # Start on the shared module-level BUFFER; heap memory is allocated
    # only if the output outgrows it (see _write_byte).
    w.buf = &BUFFER[0]
    w.size = BUF_SIZE
    w.pos = 0


cdef inline void _release_writer(Writer* w):
    # Only heap memory may be freed; the static BUFFER must never be.
    if w.buf != BUFFER:
        PyMem_Free(w.buf)
33
+
34
+
35
cdef inline int _write_byte(Writer* writer, uint8_t ch):
    """Append one byte, growing the buffer by BUF_SIZE steps; -1 on OOM."""
    cdef char * grown
    cdef Py_ssize_t new_size

    if writer.pos == writer.size:
        # Buffer full — reallocate.  The first growth must copy out of the
        # static BUFFER; later growths can realloc in place.
        new_size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            grown = <char*>PyMem_Malloc(new_size)
            if grown == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(grown, writer.buf, writer.size)
        else:
            grown = <char*>PyMem_Realloc(writer.buf, new_size)
            if grown == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = grown
        writer.size = new_size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0
58
+
59
+
60
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    """Append one code point as UTF-8; 0 on success, -1 on OOM."""
    cdef uint64_t cp = <uint64_t> symbol

    if cp < 0x80:
        return _write_byte(writer, <uint8_t>cp)
    elif cp < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (cp >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (cp & 0x3f)))
    elif 0xD800 <= cp <= 0xDFFF:
        # surrogate code point, silently dropped
        return 0
    elif cp < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (cp >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((cp >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (cp & 0x3f)))
    elif cp > 0x10FFFF:
        # beyond the Unicode range, silently dropped
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (cp >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((cp >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((cp >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (cp & 0x3f)))
91
+
92
+
93
cdef inline int _write_str(Writer* writer, str s):
    """UTF-8-encode *s* into the writer; 0 on success, -1 on OOM."""
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
    # Explicit success value: the original fell off the end and relied on
    # Cython's implicit zero return for a cdef int function.
    return 0
98
+
99
+
100
+ # --------------- _serialize_headers ----------------------
101
+
102
cdef str to_str(object s):
    """Coerce a header key/value to str; reject non-string types."""
    cls = type(s)
    if cls is str:
        return <str>s
    elif cls is _istr:
        # istr subclasses str; PyObject_Str returns the plain-str value.
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)
112
+
113
+
114
cdef void _safe_header(str string) except *:
    """Reject header text containing CR/LF (response-splitting guard)."""
    if any(sep in string for sep in ("\r", "\n")):
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )
120
+
121
+
122
def _serialize_headers(str status_line, headers):
    """Render a status line plus headers into a single bytes blob.

    All keys/values are validated against CR/LF injection first, then the
    whole head is UTF-8-encoded through the stack/heap Writer.
    """
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    # Validate everything before writing anything.
    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the head.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
venv/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (234 kB). View file
 
venv/lib/python3.10/site-packages/aiohttp/_websocket.pyx ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython cimport PyBytes_AsString
2
+
3
+
4
+ #from cpython cimport PyByteArray_AsString # cython still not exports that
5
+ cdef extern from "Python.h":
6
+ char* PyByteArray_AsString(bytearray ba) except NULL
7
+
8
+ from libc.stdint cimport uint32_t, uint64_t, uintmax_t
9
+
10
+
11
def _websocket_mask_cython(object mask, object data):
    """Note, this function mutates its `data` argument
    """
    cdef:
        Py_ssize_t remaining, i
        # bit operations on signed integers are implementation-specific
        unsigned char * out
        const unsigned char * key
        uint32_t msk32
        uint64_t msk64

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    remaining = len(data)
    out = <unsigned char*>PyByteArray_AsString(data)
    key = <const unsigned char*>PyBytes_AsString(mask)
    msk32 = (<uint32_t*>key)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # does it need in python ?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        # 64-bit fast path: replicate the 4-byte mask into 8 bytes.
        msk64 = msk32
        msk64 = (msk64 << 32) | msk32

        while remaining >= 8:
            (<uint64_t*>out)[0] ^= msk64
            out += 8
            remaining -= 8

    while remaining >= 4:
        (<uint32_t*>out)[0] ^= msk32
        out += 4
        remaining -= 4

    for i in range(0, remaining):
        out[i] ^= key[i]
venv/lib/python3.10/site-packages/aiohttp/abc.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import logging
3
+ from abc import ABC, abstractmethod
4
+ from collections.abc import Sized
5
+ from http.cookies import BaseCookie, Morsel
6
+ from typing import (
7
+ TYPE_CHECKING,
8
+ Any,
9
+ Awaitable,
10
+ Callable,
11
+ Dict,
12
+ Generator,
13
+ Iterable,
14
+ List,
15
+ Optional,
16
+ Tuple,
17
+ )
18
+
19
+ from multidict import CIMultiDict
20
+ from yarl import URL
21
+
22
+ from .helpers import get_running_loop
23
+ from .typedefs import LooseCookies
24
+
25
+ if TYPE_CHECKING:
26
+ from .web_app import Application
27
+ from .web_exceptions import HTTPException
28
+ from .web_request import BaseRequest, Request
29
+ from .web_response import StreamResponse
30
+ else:
31
+ BaseRequest = Request = Application = StreamResponse = None
32
+ HTTPException = None
33
+
34
+
35
class AbstractRouter(ABC):
    """Base class for request routers.

    A concrete router maps an incoming request to a match-info object via
    :meth:`resolve`.  After :meth:`freeze` is called the router is
    considered immutable.
    """

    def __init__(self) -> None:
        # Routers start out mutable; freeze() flips this exactly once.
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        """Whether freeze() has been called."""
        return self._frozen

    def freeze(self) -> None:
        """Mark the router immutable."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""
58
+
59
+
60
class AbstractMatchInfo(ABC):
    """Result of route resolution: handler plus routing metadata."""

    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(
        self,
    ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """
104
+
105
+
106
class AbstractView(ABC):
    """Base for class-based view handlers.

    Subclasses implement __await__ so the view instance itself can be
    awaited like a plain handler coroutine.
    """

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """The request this view was constructed for."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""
120
+
121
+
122
class AbstractResolver(ABC):
    """Abstract DNS resolver.

    Implementations translate a hostname into a list of address records
    and release any underlying resources on close().
    """

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""
132
+
133
+
134
+ if TYPE_CHECKING:
135
+ IterableBase = Iterable[Morsel[str]]
136
+ else:
137
+ IterableBase = Iterable
138
+
139
+
140
+ ClearCookiePredicate = Callable[["Morsel[str]"], bool]
141
+
142
+
143
class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar.

    A jar is sized and iterable over its stored morsels; concrete jars
    implement storage, expiry and per-request filtering.
    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Resolved eagerly so all jar operations share one loop reference.
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""
164
+
165
+
166
class AbstractStreamWriter(ABC):
    """Abstract stream writer.

    Concrete writers push body chunks (optionally compressed/chunked) and
    headers to a transport; the class-level counters are defaults that
    implementations are expected to maintain.
    """

    buffer_size = 0          # bytes currently buffered
    output_size = 0          # total bytes written so far
    length: Optional[int] = 0  # declared content length, None if unknown

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""
198
+
199
+
200
class AbstractAccessLogger(ABC):
    """Abstract writer to access log.

    Holds the destination logger and the format string; subclasses decide
    how each request/response pair is rendered.
    """

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
venv/lib/python3.10/site-packages/aiohttp/base_protocol.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from typing import Optional, cast
3
+
4
+ from .helpers import set_exception
5
+ from .tcp_helpers import tcp_nodelay
6
+
7
+
8
+ class BaseProtocol(asyncio.Protocol):
9
+ __slots__ = (
10
+ "_loop",
11
+ "_paused",
12
+ "_drain_waiter",
13
+ "_connection_lost",
14
+ "_reading_paused",
15
+ "transport",
16
+ )
17
+
18
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
19
+ self._loop: asyncio.AbstractEventLoop = loop
20
+ self._paused = False
21
+ self._drain_waiter: Optional[asyncio.Future[None]] = None
22
+ self._reading_paused = False
23
+
24
+ self.transport: Optional[asyncio.Transport] = None
25
+
26
+ @property
27
+ def connected(self) -> bool:
28
+ """Return True if the connection is open."""
29
+ return self.transport is not None
30
+
31
+ def pause_writing(self) -> None:
32
+ assert not self._paused
33
+ self._paused = True
34
+
35
+ def resume_writing(self) -> None:
36
+ assert self._paused
37
+ self._paused = False
38
+
39
+ waiter = self._drain_waiter
40
+ if waiter is not None:
41
+ self._drain_waiter = None
42
+ if not waiter.done():
43
+ waiter.set_result(None)
44
+
45
+ def pause_reading(self) -> None:
46
+ if not self._reading_paused and self.transport is not None:
47
+ try:
48
+ self.transport.pause_reading()
49
+ except (AttributeError, NotImplementedError, RuntimeError):
50
+ pass
51
+ self._reading_paused = True
52
+
53
+ def resume_reading(self) -> None:
54
+ if self._reading_paused and self.transport is not None:
55
+ try:
56
+ self.transport.resume_reading()
57
+ except (AttributeError, NotImplementedError, RuntimeError):
58
+ pass
59
+ self._reading_paused = False
60
+
61
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
62
+ tr = cast(asyncio.Transport, transport)
63
+ tcp_nodelay(tr, True)
64
+ self.transport = tr
65
+
66
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
67
+ # Wake up the writer if currently paused.
68
+ self.transport = None
69
+ if not self._paused:
70
+ return
71
+ waiter = self._drain_waiter
72
+ if waiter is None:
73
+ return
74
+ self._drain_waiter = None
75
+ if waiter.done():
76
+ return
77
+ if exc is None:
78
+ waiter.set_result(None)
79
+ else:
80
+ set_exception(
81
+ waiter,
82
+ ConnectionError("Connection lost"),
83
+ exc,
84
+ )
85
+
86
+ async def _drain_helper(self) -> None:
87
+ if not self.connected:
88
+ raise ConnectionResetError("Connection lost")
89
+ if not self._paused:
90
+ return
91
+ waiter = self._drain_waiter
92
+ if waiter is None:
93
+ waiter = self._loop.create_future()
94
+ self._drain_waiter = waiter
95
+ await asyncio.shield(waiter)
venv/lib/python3.10/site-packages/aiohttp/client.py ADDED
@@ -0,0 +1,1366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Client for asyncio."""
2
+
3
+ import asyncio
4
+ import base64
5
+ import hashlib
6
+ import json
7
+ import os
8
+ import sys
9
+ import traceback
10
+ import warnings
11
+ from contextlib import suppress
12
+ from types import SimpleNamespace, TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Awaitable,
17
+ Callable,
18
+ Coroutine,
19
+ Final,
20
+ FrozenSet,
21
+ Generator,
22
+ Generic,
23
+ Iterable,
24
+ List,
25
+ Mapping,
26
+ Optional,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ TypeVar,
31
+ Union,
32
+ )
33
+
34
+ import attr
35
+ from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
36
+ from yarl import URL
37
+
38
+ from . import hdrs, http, payload
39
+ from .abc import AbstractCookieJar
40
+ from .client_exceptions import (
41
+ ClientConnectionError as ClientConnectionError,
42
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
43
+ ClientConnectorError as ClientConnectorError,
44
+ ClientConnectorSSLError as ClientConnectorSSLError,
45
+ ClientError as ClientError,
46
+ ClientHttpProxyError as ClientHttpProxyError,
47
+ ClientOSError as ClientOSError,
48
+ ClientPayloadError as ClientPayloadError,
49
+ ClientProxyConnectionError as ClientProxyConnectionError,
50
+ ClientResponseError as ClientResponseError,
51
+ ClientSSLError as ClientSSLError,
52
+ ContentTypeError as ContentTypeError,
53
+ InvalidURL as InvalidURL,
54
+ ServerConnectionError as ServerConnectionError,
55
+ ServerDisconnectedError as ServerDisconnectedError,
56
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
57
+ ServerTimeoutError as ServerTimeoutError,
58
+ TooManyRedirects as TooManyRedirects,
59
+ WSServerHandshakeError as WSServerHandshakeError,
60
+ )
61
+ from .client_reqrep import (
62
+ ClientRequest as ClientRequest,
63
+ ClientResponse as ClientResponse,
64
+ Fingerprint as Fingerprint,
65
+ RequestInfo as RequestInfo,
66
+ _merge_ssl_params,
67
+ )
68
+ from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
69
+ from .connector import (
70
+ BaseConnector as BaseConnector,
71
+ NamedPipeConnector as NamedPipeConnector,
72
+ TCPConnector as TCPConnector,
73
+ UnixConnector as UnixConnector,
74
+ )
75
+ from .cookiejar import CookieJar
76
+ from .helpers import (
77
+ _SENTINEL,
78
+ DEBUG,
79
+ BasicAuth,
80
+ TimeoutHandle,
81
+ ceil_timeout,
82
+ get_env_proxy_for_url,
83
+ get_running_loop,
84
+ method_must_be_empty_body,
85
+ sentinel,
86
+ strip_auth_from_url,
87
+ )
88
+ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
89
+ from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
90
+ from .streams import FlowControlDataQueue
91
+ from .tracing import Trace, TraceConfig
92
+ from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
93
+
94
+ __all__ = (
95
+ # client_exceptions
96
+ "ClientConnectionError",
97
+ "ClientConnectorCertificateError",
98
+ "ClientConnectorError",
99
+ "ClientConnectorSSLError",
100
+ "ClientError",
101
+ "ClientHttpProxyError",
102
+ "ClientOSError",
103
+ "ClientPayloadError",
104
+ "ClientProxyConnectionError",
105
+ "ClientResponseError",
106
+ "ClientSSLError",
107
+ "ContentTypeError",
108
+ "InvalidURL",
109
+ "ServerConnectionError",
110
+ "ServerDisconnectedError",
111
+ "ServerFingerprintMismatch",
112
+ "ServerTimeoutError",
113
+ "TooManyRedirects",
114
+ "WSServerHandshakeError",
115
+ # client_reqrep
116
+ "ClientRequest",
117
+ "ClientResponse",
118
+ "Fingerprint",
119
+ "RequestInfo",
120
+ # connector
121
+ "BaseConnector",
122
+ "TCPConnector",
123
+ "UnixConnector",
124
+ "NamedPipeConnector",
125
+ # client_ws
126
+ "ClientWebSocketResponse",
127
+ # client
128
+ "ClientSession",
129
+ "ClientTimeout",
130
+ "request",
131
+ )
132
+
133
+
134
+ if TYPE_CHECKING:
135
+ from ssl import SSLContext
136
+ else:
137
+ SSLContext = None
138
+
139
+
140
+ @attr.s(auto_attribs=True, frozen=True, slots=True)
141
+ class ClientTimeout:
142
+ total: Optional[float] = None
143
+ connect: Optional[float] = None
144
+ sock_read: Optional[float] = None
145
+ sock_connect: Optional[float] = None
146
+ ceil_threshold: float = 5
147
+
148
+ # pool_queue_timeout: Optional[float] = None
149
+ # dns_resolution_timeout: Optional[float] = None
150
+ # socket_connect_timeout: Optional[float] = None
151
+ # connection_acquiring_timeout: Optional[float] = None
152
+ # new_connection_timeout: Optional[float] = None
153
+ # http_header_timeout: Optional[float] = None
154
+ # response_body_timeout: Optional[float] = None
155
+
156
+ # to create a timeout specific for a single request, either
157
+ # - create a completely new one to overwrite the default
158
+ # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
159
+ # to overwrite the defaults
160
+
161
+
162
+ # 5 Minute default read timeout
163
+ DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
164
+
165
+ _RetType = TypeVar("_RetType")
166
+ _CharsetResolver = Callable[[ClientResponse, bytes], str]
167
+
168
+
169
+ class ClientSession:
170
+ """First-class interface for making HTTP requests."""
171
+
172
+ ATTRS = frozenset(
173
+ [
174
+ "_base_url",
175
+ "_source_traceback",
176
+ "_connector",
177
+ "requote_redirect_url",
178
+ "_loop",
179
+ "_cookie_jar",
180
+ "_connector_owner",
181
+ "_default_auth",
182
+ "_version",
183
+ "_json_serialize",
184
+ "_requote_redirect_url",
185
+ "_timeout",
186
+ "_raise_for_status",
187
+ "_auto_decompress",
188
+ "_trust_env",
189
+ "_default_headers",
190
+ "_skip_auto_headers",
191
+ "_request_class",
192
+ "_response_class",
193
+ "_ws_response_class",
194
+ "_trace_configs",
195
+ "_read_bufsize",
196
+ "_max_line_size",
197
+ "_max_field_size",
198
+ "_resolve_charset",
199
+ ]
200
+ )
201
+
202
+ _source_traceback: Optional[traceback.StackSummary] = None
203
+ _connector: Optional[BaseConnector] = None
204
+
205
+ def __init__(
206
+ self,
207
+ base_url: Optional[StrOrURL] = None,
208
+ *,
209
+ connector: Optional[BaseConnector] = None,
210
+ loop: Optional[asyncio.AbstractEventLoop] = None,
211
+ cookies: Optional[LooseCookies] = None,
212
+ headers: Optional[LooseHeaders] = None,
213
+ skip_auto_headers: Optional[Iterable[str]] = None,
214
+ auth: Optional[BasicAuth] = None,
215
+ json_serialize: JSONEncoder = json.dumps,
216
+ request_class: Type[ClientRequest] = ClientRequest,
217
+ response_class: Type[ClientResponse] = ClientResponse,
218
+ ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
219
+ version: HttpVersion = http.HttpVersion11,
220
+ cookie_jar: Optional[AbstractCookieJar] = None,
221
+ connector_owner: bool = True,
222
+ raise_for_status: Union[
223
+ bool, Callable[[ClientResponse], Awaitable[None]]
224
+ ] = False,
225
+ read_timeout: Union[float, _SENTINEL] = sentinel,
226
+ conn_timeout: Optional[float] = None,
227
+ timeout: Union[object, ClientTimeout] = sentinel,
228
+ auto_decompress: bool = True,
229
+ trust_env: bool = False,
230
+ requote_redirect_url: bool = True,
231
+ trace_configs: Optional[List[TraceConfig]] = None,
232
+ read_bufsize: int = 2**16,
233
+ max_line_size: int = 8190,
234
+ max_field_size: int = 8190,
235
+ fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
236
+ ) -> None:
237
+ # We initialise _connector to None immediately, as it's referenced in __del__()
238
+ # and could cause issues if an exception occurs during initialisation.
239
+ self._connector: Optional[BaseConnector] = None
240
+ if timeout is sentinel or timeout is None:
241
+ self._timeout = DEFAULT_TIMEOUT
242
+ if read_timeout is not sentinel:
243
+ warnings.warn(
244
+ "read_timeout is deprecated, " "use timeout argument instead",
245
+ DeprecationWarning,
246
+ stacklevel=2,
247
+ )
248
+ self._timeout = attr.evolve(self._timeout, total=read_timeout)
249
+ if conn_timeout is not None:
250
+ self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
251
+ warnings.warn(
252
+ "conn_timeout is deprecated, " "use timeout argument instead",
253
+ DeprecationWarning,
254
+ stacklevel=2,
255
+ )
256
+ else:
257
+ if not isinstance(timeout, ClientTimeout):
258
+ raise ValueError(
259
+ f"timeout parameter cannot be of {type(timeout)} type, "
260
+ "please use 'timeout=ClientTimeout(...)'",
261
+ )
262
+ self._timeout = timeout
263
+ if read_timeout is not sentinel:
264
+ raise ValueError(
265
+ "read_timeout and timeout parameters "
266
+ "conflict, please setup "
267
+ "timeout.read"
268
+ )
269
+ if conn_timeout is not None:
270
+ raise ValueError(
271
+ "conn_timeout and timeout parameters "
272
+ "conflict, please setup "
273
+ "timeout.connect"
274
+ )
275
+ if loop is None:
276
+ if connector is not None:
277
+ loop = connector._loop
278
+
279
+ loop = get_running_loop(loop)
280
+
281
+ if base_url is None or isinstance(base_url, URL):
282
+ self._base_url: Optional[URL] = base_url
283
+ else:
284
+ self._base_url = URL(base_url)
285
+ assert (
286
+ self._base_url.origin() == self._base_url
287
+ ), "Only absolute URLs without path part are supported"
288
+
289
+ if connector is None:
290
+ connector = TCPConnector(loop=loop)
291
+
292
+ if connector._loop is not loop:
293
+ raise RuntimeError("Session and connector has to use same event loop")
294
+
295
+ self._loop = loop
296
+
297
+ if loop.get_debug():
298
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
299
+
300
+ if cookie_jar is None:
301
+ cookie_jar = CookieJar(loop=loop)
302
+ self._cookie_jar = cookie_jar
303
+
304
+ if cookies is not None:
305
+ self._cookie_jar.update_cookies(cookies)
306
+
307
+ self._connector = connector
308
+ self._connector_owner = connector_owner
309
+ self._default_auth = auth
310
+ self._version = version
311
+ self._json_serialize = json_serialize
312
+ self._raise_for_status = raise_for_status
313
+ self._auto_decompress = auto_decompress
314
+ self._trust_env = trust_env
315
+ self._requote_redirect_url = requote_redirect_url
316
+ self._read_bufsize = read_bufsize
317
+ self._max_line_size = max_line_size
318
+ self._max_field_size = max_field_size
319
+
320
+ # Convert to list of tuples
321
+ if headers:
322
+ real_headers: CIMultiDict[str] = CIMultiDict(headers)
323
+ else:
324
+ real_headers = CIMultiDict()
325
+ self._default_headers: CIMultiDict[str] = real_headers
326
+ if skip_auto_headers is not None:
327
+ self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
328
+ else:
329
+ self._skip_auto_headers = frozenset()
330
+
331
+ self._request_class = request_class
332
+ self._response_class = response_class
333
+ self._ws_response_class = ws_response_class
334
+
335
+ self._trace_configs = trace_configs or []
336
+ for trace_config in self._trace_configs:
337
+ trace_config.freeze()
338
+
339
+ self._resolve_charset = fallback_charset_resolver
340
+
341
+ def __init_subclass__(cls: Type["ClientSession"]) -> None:
342
+ warnings.warn(
343
+ "Inheritance class {} from ClientSession "
344
+ "is discouraged".format(cls.__name__),
345
+ DeprecationWarning,
346
+ stacklevel=2,
347
+ )
348
+
349
+ if DEBUG:
350
+
351
+ def __setattr__(self, name: str, val: Any) -> None:
352
+ if name not in self.ATTRS:
353
+ warnings.warn(
354
+ "Setting custom ClientSession.{} attribute "
355
+ "is discouraged".format(name),
356
+ DeprecationWarning,
357
+ stacklevel=2,
358
+ )
359
+ super().__setattr__(name, val)
360
+
361
+ def __del__(self, _warnings: Any = warnings) -> None:
362
+ if not self.closed:
363
+ kwargs = {"source": self}
364
+ _warnings.warn(
365
+ f"Unclosed client session {self!r}", ResourceWarning, **kwargs
366
+ )
367
+ context = {"client_session": self, "message": "Unclosed client session"}
368
+ if self._source_traceback is not None:
369
+ context["source_traceback"] = self._source_traceback
370
+ self._loop.call_exception_handler(context)
371
+
372
+ def request(
373
+ self, method: str, url: StrOrURL, **kwargs: Any
374
+ ) -> "_RequestContextManager":
375
+ """Perform HTTP request."""
376
+ return _RequestContextManager(self._request(method, url, **kwargs))
377
+
378
+ def _build_url(self, str_or_url: StrOrURL) -> URL:
379
+ url = URL(str_or_url)
380
+ if self._base_url is None:
381
+ return url
382
+ else:
383
+ assert not url.is_absolute() and url.path.startswith("/")
384
+ return self._base_url.join(url)
385
+
386
+ async def _request(
387
+ self,
388
+ method: str,
389
+ str_or_url: StrOrURL,
390
+ *,
391
+ params: Optional[Mapping[str, str]] = None,
392
+ data: Any = None,
393
+ json: Any = None,
394
+ cookies: Optional[LooseCookies] = None,
395
+ headers: Optional[LooseHeaders] = None,
396
+ skip_auto_headers: Optional[Iterable[str]] = None,
397
+ auth: Optional[BasicAuth] = None,
398
+ allow_redirects: bool = True,
399
+ max_redirects: int = 10,
400
+ compress: Optional[str] = None,
401
+ chunked: Optional[bool] = None,
402
+ expect100: bool = False,
403
+ raise_for_status: Union[
404
+ None, bool, Callable[[ClientResponse], Awaitable[None]]
405
+ ] = None,
406
+ read_until_eof: bool = True,
407
+ proxy: Optional[StrOrURL] = None,
408
+ proxy_auth: Optional[BasicAuth] = None,
409
+ timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
410
+ verify_ssl: Optional[bool] = None,
411
+ fingerprint: Optional[bytes] = None,
412
+ ssl_context: Optional[SSLContext] = None,
413
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
414
+ server_hostname: Optional[str] = None,
415
+ proxy_headers: Optional[LooseHeaders] = None,
416
+ trace_request_ctx: Optional[SimpleNamespace] = None,
417
+ read_bufsize: Optional[int] = None,
418
+ auto_decompress: Optional[bool] = None,
419
+ max_line_size: Optional[int] = None,
420
+ max_field_size: Optional[int] = None,
421
+ ) -> ClientResponse:
422
+
423
+ # NOTE: timeout clamps existing connect and read timeouts. We cannot
424
+ # set the default to None because we need to detect if the user wants
425
+ # to use the existing timeouts by setting timeout to None.
426
+
427
+ if self.closed:
428
+ raise RuntimeError("Session is closed")
429
+
430
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
431
+
432
+ if data is not None and json is not None:
433
+ raise ValueError(
434
+ "data and json parameters can not be used at the same time"
435
+ )
436
+ elif json is not None:
437
+ data = payload.JsonPayload(json, dumps=self._json_serialize)
438
+
439
+ if not isinstance(chunked, bool) and chunked is not None:
440
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
441
+
442
+ redirects = 0
443
+ history = []
444
+ version = self._version
445
+ params = params or {}
446
+
447
+ # Merge with default headers and transform to CIMultiDict
448
+ headers = self._prepare_headers(headers)
449
+ proxy_headers = self._prepare_headers(proxy_headers)
450
+
451
+ try:
452
+ url = self._build_url(str_or_url)
453
+ except ValueError as e:
454
+ raise InvalidURL(str_or_url) from e
455
+
456
+ skip_headers = set(self._skip_auto_headers)
457
+ if skip_auto_headers is not None:
458
+ for i in skip_auto_headers:
459
+ skip_headers.add(istr(i))
460
+
461
+ if proxy is not None:
462
+ try:
463
+ proxy = URL(proxy)
464
+ except ValueError as e:
465
+ raise InvalidURL(proxy) from e
466
+
467
+ if timeout is sentinel:
468
+ real_timeout: ClientTimeout = self._timeout
469
+ else:
470
+ if not isinstance(timeout, ClientTimeout):
471
+ real_timeout = ClientTimeout(total=timeout)
472
+ else:
473
+ real_timeout = timeout
474
+ # timeout is cumulative for all request operations
475
+ # (request, redirects, responses, data consuming)
476
+ tm = TimeoutHandle(
477
+ self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
478
+ )
479
+ handle = tm.start()
480
+
481
+ if read_bufsize is None:
482
+ read_bufsize = self._read_bufsize
483
+
484
+ if auto_decompress is None:
485
+ auto_decompress = self._auto_decompress
486
+
487
+ if max_line_size is None:
488
+ max_line_size = self._max_line_size
489
+
490
+ if max_field_size is None:
491
+ max_field_size = self._max_field_size
492
+
493
+ traces = [
494
+ Trace(
495
+ self,
496
+ trace_config,
497
+ trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
498
+ )
499
+ for trace_config in self._trace_configs
500
+ ]
501
+
502
+ for trace in traces:
503
+ await trace.send_request_start(method, url.update_query(params), headers)
504
+
505
+ timer = tm.timer()
506
+ try:
507
+ with timer:
508
+ while True:
509
+ url, auth_from_url = strip_auth_from_url(url)
510
+ if auth and auth_from_url:
511
+ raise ValueError(
512
+ "Cannot combine AUTH argument with "
513
+ "credentials encoded in URL"
514
+ )
515
+
516
+ if auth is None:
517
+ auth = auth_from_url
518
+ if auth is None:
519
+ auth = self._default_auth
520
+ # It would be confusing if we support explicit
521
+ # Authorization header with auth argument
522
+ if (
523
+ headers is not None
524
+ and auth is not None
525
+ and hdrs.AUTHORIZATION in headers
526
+ ):
527
+ raise ValueError(
528
+ "Cannot combine AUTHORIZATION header "
529
+ "with AUTH argument or credentials "
530
+ "encoded in URL"
531
+ )
532
+
533
+ all_cookies = self._cookie_jar.filter_cookies(url)
534
+
535
+ if cookies is not None:
536
+ tmp_cookie_jar = CookieJar()
537
+ tmp_cookie_jar.update_cookies(cookies)
538
+ req_cookies = tmp_cookie_jar.filter_cookies(url)
539
+ if req_cookies:
540
+ all_cookies.load(req_cookies)
541
+
542
+ if proxy is not None:
543
+ proxy = URL(proxy)
544
+ elif self._trust_env:
545
+ with suppress(LookupError):
546
+ proxy, proxy_auth = get_env_proxy_for_url(url)
547
+
548
+ req = self._request_class(
549
+ method,
550
+ url,
551
+ params=params,
552
+ headers=headers,
553
+ skip_auto_headers=skip_headers,
554
+ data=data,
555
+ cookies=all_cookies,
556
+ auth=auth,
557
+ version=version,
558
+ compress=compress,
559
+ chunked=chunked,
560
+ expect100=expect100,
561
+ loop=self._loop,
562
+ response_class=self._response_class,
563
+ proxy=proxy,
564
+ proxy_auth=proxy_auth,
565
+ timer=timer,
566
+ session=self,
567
+ ssl=ssl if ssl is not None else True,
568
+ server_hostname=server_hostname,
569
+ proxy_headers=proxy_headers,
570
+ traces=traces,
571
+ trust_env=self.trust_env,
572
+ )
573
+
574
+ # connection timeout
575
+ try:
576
+ async with ceil_timeout(
577
+ real_timeout.connect,
578
+ ceil_threshold=real_timeout.ceil_threshold,
579
+ ):
580
+ assert self._connector is not None
581
+ conn = await self._connector.connect(
582
+ req, traces=traces, timeout=real_timeout
583
+ )
584
+ except asyncio.TimeoutError as exc:
585
+ raise ServerTimeoutError(
586
+ "Connection timeout " "to host {}".format(url)
587
+ ) from exc
588
+
589
+ assert conn.transport is not None
590
+
591
+ assert conn.protocol is not None
592
+ conn.protocol.set_response_params(
593
+ timer=timer,
594
+ skip_payload=method_must_be_empty_body(method),
595
+ read_until_eof=read_until_eof,
596
+ auto_decompress=auto_decompress,
597
+ read_timeout=real_timeout.sock_read,
598
+ read_bufsize=read_bufsize,
599
+ timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
600
+ max_line_size=max_line_size,
601
+ max_field_size=max_field_size,
602
+ )
603
+
604
+ try:
605
+ try:
606
+ resp = await req.send(conn)
607
+ try:
608
+ await resp.start(conn)
609
+ except BaseException:
610
+ resp.close()
611
+ raise
612
+ except BaseException:
613
+ conn.close()
614
+ raise
615
+ except ClientError:
616
+ raise
617
+ except OSError as exc:
618
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
619
+ raise
620
+ raise ClientOSError(*exc.args) from exc
621
+
622
+ self._cookie_jar.update_cookies(resp.cookies, resp.url)
623
+
624
+ # redirects
625
+ if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
626
+
627
+ for trace in traces:
628
+ await trace.send_request_redirect(
629
+ method, url.update_query(params), headers, resp
630
+ )
631
+
632
+ redirects += 1
633
+ history.append(resp)
634
+ if max_redirects and redirects >= max_redirects:
635
+ resp.close()
636
+ raise TooManyRedirects(
637
+ history[0].request_info, tuple(history)
638
+ )
639
+
640
+ # For 301 and 302, mimic IE, now changed in RFC
641
+ # https://github.com/kennethreitz/requests/pull/269
642
+ if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
643
+ resp.status in (301, 302) and resp.method == hdrs.METH_POST
644
+ ):
645
+ method = hdrs.METH_GET
646
+ data = None
647
+ if headers.get(hdrs.CONTENT_LENGTH):
648
+ headers.pop(hdrs.CONTENT_LENGTH)
649
+
650
+ r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
651
+ hdrs.URI
652
+ )
653
+ if r_url is None:
654
+ # see github.com/aio-libs/aiohttp/issues/2022
655
+ break
656
+ else:
657
+ # reading from correct redirection
658
+ # response is forbidden
659
+ resp.release()
660
+
661
+ try:
662
+ parsed_url = URL(
663
+ r_url, encoded=not self._requote_redirect_url
664
+ )
665
+
666
+ except ValueError as e:
667
+ raise InvalidURL(r_url) from e
668
+
669
+ scheme = parsed_url.scheme
670
+ if scheme not in ("http", "https", ""):
671
+ resp.close()
672
+ raise ValueError("Can redirect only to http or https")
673
+ elif not scheme:
674
+ parsed_url = url.join(parsed_url)
675
+
676
+ if url.origin() != parsed_url.origin():
677
+ auth = None
678
+ headers.pop(hdrs.AUTHORIZATION, None)
679
+
680
+ url = parsed_url
681
+ params = {}
682
+ resp.release()
683
+ continue
684
+
685
+ break
686
+
687
+ # check response status
688
+ if raise_for_status is None:
689
+ raise_for_status = self._raise_for_status
690
+
691
+ if raise_for_status is None:
692
+ pass
693
+ elif callable(raise_for_status):
694
+ await raise_for_status(resp)
695
+ elif raise_for_status:
696
+ resp.raise_for_status()
697
+
698
+ # register connection
699
+ if handle is not None:
700
+ if resp.connection is not None:
701
+ resp.connection.add_callback(handle.cancel)
702
+ else:
703
+ handle.cancel()
704
+
705
+ resp._history = tuple(history)
706
+
707
+ for trace in traces:
708
+ await trace.send_request_end(
709
+ method, url.update_query(params), headers, resp
710
+ )
711
+ return resp
712
+
713
+ except BaseException as e:
714
+ # cleanup timer
715
+ tm.close()
716
+ if handle:
717
+ handle.cancel()
718
+ handle = None
719
+
720
+ for trace in traces:
721
+ await trace.send_request_exception(
722
+ method, url.update_query(params), headers, e
723
+ )
724
+ raise
725
+
726
+ def ws_connect(
727
+ self,
728
+ url: StrOrURL,
729
+ *,
730
+ method: str = hdrs.METH_GET,
731
+ protocols: Iterable[str] = (),
732
+ timeout: float = 10.0,
733
+ receive_timeout: Optional[float] = None,
734
+ autoclose: bool = True,
735
+ autoping: bool = True,
736
+ heartbeat: Optional[float] = None,
737
+ auth: Optional[BasicAuth] = None,
738
+ origin: Optional[str] = None,
739
+ params: Optional[Mapping[str, str]] = None,
740
+ headers: Optional[LooseHeaders] = None,
741
+ proxy: Optional[StrOrURL] = None,
742
+ proxy_auth: Optional[BasicAuth] = None,
743
+ ssl: Union[SSLContext, bool, None, Fingerprint] = True,
744
+ verify_ssl: Optional[bool] = None,
745
+ fingerprint: Optional[bytes] = None,
746
+ ssl_context: Optional[SSLContext] = None,
747
+ proxy_headers: Optional[LooseHeaders] = None,
748
+ compress: int = 0,
749
+ max_msg_size: int = 4 * 1024 * 1024,
750
+ ) -> "_WSRequestContextManager":
751
+ """Initiate websocket connection."""
752
+ return _WSRequestContextManager(
753
+ self._ws_connect(
754
+ url,
755
+ method=method,
756
+ protocols=protocols,
757
+ timeout=timeout,
758
+ receive_timeout=receive_timeout,
759
+ autoclose=autoclose,
760
+ autoping=autoping,
761
+ heartbeat=heartbeat,
762
+ auth=auth,
763
+ origin=origin,
764
+ params=params,
765
+ headers=headers,
766
+ proxy=proxy,
767
+ proxy_auth=proxy_auth,
768
+ ssl=ssl,
769
+ verify_ssl=verify_ssl,
770
+ fingerprint=fingerprint,
771
+ ssl_context=ssl_context,
772
+ proxy_headers=proxy_headers,
773
+ compress=compress,
774
+ max_msg_size=max_msg_size,
775
+ )
776
+ )
777
+
778
+ async def _ws_connect(
779
+ self,
780
+ url: StrOrURL,
781
+ *,
782
+ method: str = hdrs.METH_GET,
783
+ protocols: Iterable[str] = (),
784
+ timeout: float = 10.0,
785
+ receive_timeout: Optional[float] = None,
786
+ autoclose: bool = True,
787
+ autoping: bool = True,
788
+ heartbeat: Optional[float] = None,
789
+ auth: Optional[BasicAuth] = None,
790
+ origin: Optional[str] = None,
791
+ params: Optional[Mapping[str, str]] = None,
792
+ headers: Optional[LooseHeaders] = None,
793
+ proxy: Optional[StrOrURL] = None,
794
+ proxy_auth: Optional[BasicAuth] = None,
795
+ ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True,
796
+ verify_ssl: Optional[bool] = None,
797
+ fingerprint: Optional[bytes] = None,
798
+ ssl_context: Optional[SSLContext] = None,
799
+ proxy_headers: Optional[LooseHeaders] = None,
800
+ compress: int = 0,
801
+ max_msg_size: int = 4 * 1024 * 1024,
802
+ ) -> ClientWebSocketResponse:
803
+
804
+ if headers is None:
805
+ real_headers: CIMultiDict[str] = CIMultiDict()
806
+ else:
807
+ real_headers = CIMultiDict(headers)
808
+
809
+ default_headers = {
810
+ hdrs.UPGRADE: "websocket",
811
+ hdrs.CONNECTION: "Upgrade",
812
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
813
+ }
814
+
815
+ for key, value in default_headers.items():
816
+ real_headers.setdefault(key, value)
817
+
818
+ sec_key = base64.b64encode(os.urandom(16))
819
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
820
+
821
+ if protocols:
822
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
823
+ if origin is not None:
824
+ real_headers[hdrs.ORIGIN] = origin
825
+ if compress:
826
+ extstr = ws_ext_gen(compress=compress)
827
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
828
+
829
+ # For the sake of backward compatibility, if user passes in None, convert it to True
830
+ if ssl is None:
831
+ ssl = True
832
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
833
+
834
+ # send request
835
+ resp = await self.request(
836
+ method,
837
+ url,
838
+ params=params,
839
+ headers=real_headers,
840
+ read_until_eof=False,
841
+ auth=auth,
842
+ proxy=proxy,
843
+ proxy_auth=proxy_auth,
844
+ ssl=ssl,
845
+ proxy_headers=proxy_headers,
846
+ )
847
+
848
+ try:
849
+ # check handshake
850
+ if resp.status != 101:
851
+ raise WSServerHandshakeError(
852
+ resp.request_info,
853
+ resp.history,
854
+ message="Invalid response status",
855
+ status=resp.status,
856
+ headers=resp.headers,
857
+ )
858
+
859
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
860
+ raise WSServerHandshakeError(
861
+ resp.request_info,
862
+ resp.history,
863
+ message="Invalid upgrade header",
864
+ status=resp.status,
865
+ headers=resp.headers,
866
+ )
867
+
868
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
869
+ raise WSServerHandshakeError(
870
+ resp.request_info,
871
+ resp.history,
872
+ message="Invalid connection header",
873
+ status=resp.status,
874
+ headers=resp.headers,
875
+ )
876
+
877
+ # key calculation
878
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
879
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
880
+ if r_key != match:
881
+ raise WSServerHandshakeError(
882
+ resp.request_info,
883
+ resp.history,
884
+ message="Invalid challenge response",
885
+ status=resp.status,
886
+ headers=resp.headers,
887
+ )
888
+
889
+ # websocket protocol
890
+ protocol = None
891
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
892
+ resp_protocols = [
893
+ proto.strip()
894
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
895
+ ]
896
+
897
+ for proto in resp_protocols:
898
+ if proto in protocols:
899
+ protocol = proto
900
+ break
901
+
902
+ # websocket compress
903
+ notakeover = False
904
+ if compress:
905
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
906
+ if compress_hdrs:
907
+ try:
908
+ compress, notakeover = ws_ext_parse(compress_hdrs)
909
+ except WSHandshakeError as exc:
910
+ raise WSServerHandshakeError(
911
+ resp.request_info,
912
+ resp.history,
913
+ message=exc.args[0],
914
+ status=resp.status,
915
+ headers=resp.headers,
916
+ ) from exc
917
+ else:
918
+ compress = 0
919
+ notakeover = False
920
+
921
+ conn = resp.connection
922
+ assert conn is not None
923
+ conn_proto = conn.protocol
924
+ assert conn_proto is not None
925
+ transport = conn.transport
926
+ assert transport is not None
927
+ reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
928
+ conn_proto, 2**16, loop=self._loop
929
+ )
930
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
931
+ writer = WebSocketWriter(
932
+ conn_proto,
933
+ transport,
934
+ use_mask=True,
935
+ compress=compress,
936
+ notakeover=notakeover,
937
+ )
938
+ except BaseException:
939
+ resp.close()
940
+ raise
941
+ else:
942
+ return self._ws_response_class(
943
+ reader,
944
+ writer,
945
+ protocol,
946
+ resp,
947
+ timeout,
948
+ autoclose,
949
+ autoping,
950
+ self._loop,
951
+ receive_timeout=receive_timeout,
952
+ heartbeat=heartbeat,
953
+ compress=compress,
954
+ client_notakeover=notakeover,
955
+ )
956
+
957
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
958
+ """Add default headers and transform it to CIMultiDict"""
959
+ # Convert headers to MultiDict
960
+ result = CIMultiDict(self._default_headers)
961
+ if headers:
962
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
963
+ headers = CIMultiDict(headers)
964
+ added_names: Set[str] = set()
965
+ for key, value in headers.items():
966
+ if key in added_names:
967
+ result.add(key, value)
968
+ else:
969
+ result[key] = value
970
+ added_names.add(key)
971
+ return result
972
+
973
+ def get(
974
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
975
+ ) -> "_RequestContextManager":
976
+ """Perform HTTP GET request."""
977
+ return _RequestContextManager(
978
+ self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
979
+ )
980
+
981
+ def options(
982
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
983
+ ) -> "_RequestContextManager":
984
+ """Perform HTTP OPTIONS request."""
985
+ return _RequestContextManager(
986
+ self._request(
987
+ hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
988
+ )
989
+ )
990
+
991
+ def head(
992
+ self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
993
+ ) -> "_RequestContextManager":
994
+ """Perform HTTP HEAD request."""
995
+ return _RequestContextManager(
996
+ self._request(
997
+ hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
998
+ )
999
+ )
1000
+
1001
+ def post(
1002
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1003
+ ) -> "_RequestContextManager":
1004
+ """Perform HTTP POST request."""
1005
+ return _RequestContextManager(
1006
+ self._request(hdrs.METH_POST, url, data=data, **kwargs)
1007
+ )
1008
+
1009
+ def put(
1010
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1011
+ ) -> "_RequestContextManager":
1012
+ """Perform HTTP PUT request."""
1013
+ return _RequestContextManager(
1014
+ self._request(hdrs.METH_PUT, url, data=data, **kwargs)
1015
+ )
1016
+
1017
+ def patch(
1018
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1019
+ ) -> "_RequestContextManager":
1020
+ """Perform HTTP PATCH request."""
1021
+ return _RequestContextManager(
1022
+ self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
1023
+ )
1024
+
1025
+ def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
1026
+ """Perform HTTP DELETE request."""
1027
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
1028
+
1029
+ async def close(self) -> None:
1030
+ """Close underlying connector.
1031
+
1032
+ Release all acquired resources.
1033
+ """
1034
+ if not self.closed:
1035
+ if self._connector is not None and self._connector_owner:
1036
+ await self._connector.close()
1037
+ self._connector = None
1038
+
1039
+ @property
1040
+ def closed(self) -> bool:
1041
+ """Is client session closed.
1042
+
1043
+ A readonly property.
1044
+ """
1045
+ return self._connector is None or self._connector.closed
1046
+
1047
+ @property
1048
+ def connector(self) -> Optional[BaseConnector]:
1049
+ """Connector instance used for the session."""
1050
+ return self._connector
1051
+
1052
+ @property
1053
+ def cookie_jar(self) -> AbstractCookieJar:
1054
+ """The session cookies."""
1055
+ return self._cookie_jar
1056
+
1057
+ @property
1058
+ def version(self) -> Tuple[int, int]:
1059
+ """The session HTTP protocol version."""
1060
+ return self._version
1061
+
1062
+ @property
1063
+ def requote_redirect_url(self) -> bool:
1064
+ """Do URL requoting on redirection handling."""
1065
+ return self._requote_redirect_url
1066
+
1067
+ @requote_redirect_url.setter
1068
+ def requote_redirect_url(self, val: bool) -> None:
1069
+ """Do URL requoting on redirection handling."""
1070
+ warnings.warn(
1071
+ "session.requote_redirect_url modification " "is deprecated #2778",
1072
+ DeprecationWarning,
1073
+ stacklevel=2,
1074
+ )
1075
+ self._requote_redirect_url = val
1076
+
1077
+ @property
1078
+ def loop(self) -> asyncio.AbstractEventLoop:
1079
+ """Session's loop."""
1080
+ warnings.warn(
1081
+ "client.loop property is deprecated", DeprecationWarning, stacklevel=2
1082
+ )
1083
+ return self._loop
1084
+
1085
+ @property
1086
+ def timeout(self) -> ClientTimeout:
1087
+ """Timeout for the session."""
1088
+ return self._timeout
1089
+
1090
+ @property
1091
+ def headers(self) -> "CIMultiDict[str]":
1092
+ """The default headers of the client session."""
1093
+ return self._default_headers
1094
+
1095
+ @property
1096
+ def skip_auto_headers(self) -> FrozenSet[istr]:
1097
+ """Headers for which autogeneration should be skipped"""
1098
+ return self._skip_auto_headers
1099
+
1100
+ @property
1101
+ def auth(self) -> Optional[BasicAuth]:
1102
+ """An object that represents HTTP Basic Authorization"""
1103
+ return self._default_auth
1104
+
1105
+ @property
1106
+ def json_serialize(self) -> JSONEncoder:
1107
+ """Json serializer callable"""
1108
+ return self._json_serialize
1109
+
1110
+ @property
1111
+ def connector_owner(self) -> bool:
1112
+ """Should connector be closed on session closing"""
1113
+ return self._connector_owner
1114
+
1115
+ @property
1116
+ def raise_for_status(
1117
+ self,
1118
+ ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
1119
+ """Should `ClientResponse.raise_for_status()` be called for each response."""
1120
+ return self._raise_for_status
1121
+
1122
+ @property
1123
+ def auto_decompress(self) -> bool:
1124
+ """Should the body response be automatically decompressed."""
1125
+ return self._auto_decompress
1126
+
1127
+ @property
1128
+ def trust_env(self) -> bool:
1129
+ """
1130
+ Should proxies information from environment or netrc be trusted.
1131
+
1132
+ Information is from HTTP_PROXY / HTTPS_PROXY environment variables
1133
+ or ~/.netrc file if present.
1134
+ """
1135
+ return self._trust_env
1136
+
1137
+ @property
1138
+ def trace_configs(self) -> List[TraceConfig]:
1139
+ """A list of TraceConfig instances used for client tracing"""
1140
+ return self._trace_configs
1141
+
1142
+ def detach(self) -> None:
1143
+ """Detach connector from session without closing the former.
1144
+
1145
+ Session is switched to closed state anyway.
1146
+ """
1147
+ self._connector = None
1148
+
1149
+ def __enter__(self) -> None:
1150
+ raise TypeError("Use async with instead")
1151
+
1152
+ def __exit__(
1153
+ self,
1154
+ exc_type: Optional[Type[BaseException]],
1155
+ exc_val: Optional[BaseException],
1156
+ exc_tb: Optional[TracebackType],
1157
+ ) -> None:
1158
+ # __exit__ should exist in pair with __enter__ but never executed
1159
+ pass # pragma: no cover
1160
+
1161
+ async def __aenter__(self) -> "ClientSession":
1162
+ return self
1163
+
1164
+ async def __aexit__(
1165
+ self,
1166
+ exc_type: Optional[Type[BaseException]],
1167
+ exc_val: Optional[BaseException],
1168
+ exc_tb: Optional[TracebackType],
1169
+ ) -> None:
1170
+ await self.close()
1171
+
1172
+
1173
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Awaitable wrapper around a request coroutine.

    Lets the same object be either awaited directly or used as an
    async context manager; subclasses define the exit behavior.
    """

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro = coro

    # Coroutine protocol: delegate straight to the wrapped coroutine.
    def send(self, arg: None) -> "asyncio.Future[Any]":
        return self._coro.send(arg)

    def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
        return self._coro.throw(*args, **kwargs)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _RetType]:
        return self._coro.__await__()

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    async def __aenter__(self) -> _RetType:
        self._resp = await self._coro
        return self._resp
1199
+
1200
+
1201
class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
    """Context manager yielding a plain HTTP ClientResponse."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # The exception (if any) may stem from user code unrelated to
        # the connection state, so we never force-close here; we only
        # release the response and let the normal connection error
        # handling close/recycle the connection when needed.
        self._resp.release()
        await self._resp.wait_for_close()
1217
+
1218
+
1219
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
    """Context manager yielding a websocket response; closes it on exit."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self._resp.close()
1229
+
1230
+
1231
class _SessionRequestContextManager:
    """Context manager for one-off requests that own a throwaway session.

    Both the response and the temporary session are closed on exit;
    the session is also closed if entering (i.e. sending the request)
    fails.
    """

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp: Optional[ClientResponse] = None
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            self._resp = await self._coro
        except BaseException:
            # The request never produced a response; the temporary
            # session would otherwise leak.
            await self._session.close()
            raise
        return self._resp

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        await self._session.close()
1262
+
1263
+
1264
def request(
    method: str,
    url: StrOrURL,
    *,
    params: Optional[Mapping[str, str]] = None,
    data: Any = None,
    json: Any = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Optional[bool] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, object] = sentinel,
    cookies: Optional[LooseCookies] = None,
    version: HttpVersion = http.HttpVersion11,
    connector: Optional[BaseConnector] = None,
    read_bufsize: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    max_line_size: int = 8190,
    max_field_size: int = 8190,
) -> _SessionRequestContextManager:
    """Construct and send a request inside a throwaway ClientSession.

    Returns a context manager that yields the response object.

    method - HTTP method
    url - request url
    params - (optional) Dictionary or bytes to be sent in the query
        string of the new request
    data - (optional) Dictionary, bytes, or file-like object to
        send in the body of the request
    json - (optional) Any json compatible python object
    headers - (optional) Dictionary of HTTP Headers to send with
        the request
    cookies - (optional) Dict object to send with the request
    auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
    allow_redirects - (optional) If set to False, do not follow redirects
    version - Request HTTP version.
    compress - Set to True if request has to be compressed
        with deflate encoding.
    chunked - Set to chunk size for chunked transfer encoding.
    expect100 - Expect 100-continue response from server.
    connector - BaseConnector sub-class instance to support
        connection pooling.
    read_until_eof - Read response until eof if response
        does not have Content-Length header.
    loop - Optional event loop.
    timeout - Optional ClientTimeout settings structure, 5min
        total timeout by default.

    Usage::
        >>> import aiohttp
        >>> resp = await aiohttp.request('GET', 'http://python.org/')
        >>> resp
        <ClientResponse(python.org/) [200]>
        >>> data = await resp.read()
    """
    # Only own (and thus later close) the connector we create ourselves.
    connector_owner = connector is None
    if connector is None:
        connector = TCPConnector(loop=loop, force_close=True)

    session = ClientSession(
        loop=loop,
        cookies=cookies,
        version=version,
        timeout=timeout,
        connector=connector,
        connector_owner=connector_owner,
    )

    return _SessionRequestContextManager(
        session._request(
            method,
            url,
            params=params,
            data=data,
            json=json,
            headers=headers,
            skip_auto_headers=skip_auto_headers,
            auth=auth,
            allow_redirects=allow_redirects,
            max_redirects=max_redirects,
            compress=compress,
            chunked=chunked,
            expect100=expect100,
            raise_for_status=raise_for_status,
            read_until_eof=read_until_eof,
            proxy=proxy,
            proxy_auth=proxy_auth,
            read_bufsize=read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        ),
        session,
    )
venv/lib/python3.10/site-packages/aiohttp/client_exceptions.py ADDED
@@ -0,0 +1,346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP related errors."""
2
+
3
+ import asyncio
4
+ import warnings
5
+ from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
6
+
7
+ from .http_parser import RawResponseMessage
8
+ from .typedefs import LooseHeaders
9
+
10
+ try:
11
+ import ssl
12
+
13
+ SSLContext = ssl.SSLContext
14
+ except ImportError: # pragma: no cover
15
+ ssl = SSLContext = None # type: ignore[assignment]
16
+
17
+
18
+ if TYPE_CHECKING:
19
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
20
+ else:
21
+ RequestInfo = ClientResponse = ConnectionKey = None
22
+
23
+ __all__ = (
24
+ "ClientError",
25
+ "ClientConnectionError",
26
+ "ClientOSError",
27
+ "ClientConnectorError",
28
+ "ClientProxyConnectionError",
29
+ "ClientSSLError",
30
+ "ClientConnectorSSLError",
31
+ "ClientConnectorCertificateError",
32
+ "ServerConnectionError",
33
+ "ServerTimeoutError",
34
+ "ServerDisconnectedError",
35
+ "ServerFingerprintMismatch",
36
+ "ClientResponseError",
37
+ "ClientHttpProxyError",
38
+ "WSServerHandshakeError",
39
+ "ContentTypeError",
40
+ "ClientPayloadError",
41
+ "InvalidURL",
42
+ )
43
+
44
+
45
class ClientError(Exception):
    """Root of the aiohttp client exception hierarchy."""
47
+
48
+
49
class ClientResponseError(ClientError):
    """Base class for exceptions raised after a response was received.

    Attributes:
        request_info: An instance of RequestInfo.
        history: A sequence of responses, if redirects occurred.
        status: HTTP status code.
        message: Error message.
        headers: Response headers.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        # Prefer the modern `status`, fall back to deprecated `code`,
        # default to 0 when neither was given.
        if status is not None:
            self.status = status
        else:
            self.status = code if code is not None else 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return f"{self.status}, message={self.message!r}, url={self.request_info.real_url!r}"

    def __repr__(self) -> str:
        pieces = [f"{self.request_info!r}", f"{self.history!r}"]
        if self.status != 0:
            pieces.append(f"status={self.status!r}")
        if self.message != "":
            pieces.append(f"message={self.message!r}")
        if self.headers is not None:
            pieces.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(pieces)})"

    @property
    def code(self) -> int:
        """Deprecated alias for :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
126
+
127
+
128
class ContentTypeError(ClientResponseError):
    """The response Content-Type is not the one expected."""


class WSServerHandshakeError(ClientResponseError):
    """The websocket server handshake failed."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """The redirect limit was exceeded while following redirects."""
147
+
148
+
149
class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """Client connection error that is also an OSError."""


class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        """The underlying OSError that caused the failure."""
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        ssl_info = "default" if self.ssl is True else self.ssl
        return f"Cannot connect to host {self.host}:{self.port} ssl:{ssl_info} [{self.strerror}]"

    # OSError.__reduce__ does too much black magick; use the plain
    # BaseException pickling protocol instead.
    __reduce__ = BaseException.__reduce__
193
+
194
+
195
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        """Filesystem path of the unix socket that failed to connect."""
        return self._path

    def __str__(self) -> str:
        ssl_info = "default" if self.ssl is True else self.ssl
        return f"Cannot connect to unix socket {self.path} ssl:{ssl_info} [{self.strerror}]"
224
+
225
+
226
class ServerConnectionError(ClientConnectionError):
    """Base class for server-side connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """The server closed the connection unexpectedly."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"
        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """The server did not respond in time."""


class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} expected={self.expected!r} "
            f"got={self.got!r} host={self.host!r} port={self.port!r}>"
        )
259
+
260
+
261
class ClientPayloadError(ClientError):
    """The response payload is malformed or incomplete."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contains host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # Typed as Any rather than yarl.URL: the exception can be
        # raised from the URL(url) constructor call itself.
        super().__init__(url)

    @property
    def url(self) -> Any:
        """The offending URL value, exactly as passed in."""
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"
285
+
286
+
287
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


# Choose base classes for the SSL error hierarchy depending on whether
# the ssl module is available; without it, fall back to ValueError so
# the classes below still exist and behave sanely.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (ClientSSLError, ssl.CertificateError)

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (ClientSSLError, ValueError)

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
309
+
310
+
311
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """SSL handshake/protocol error while connecting."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Certificate validation error while connecting."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        """The underlying certificate validation exception."""
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            f"Cannot connect to host {self.host}:{self.port} ssl:{self.ssl} "
            f"[{self.certificate_error.__class__.__name__}: "
            f"{self.certificate_error.args}]"
        )
venv/lib/python3.10/site-packages/aiohttp/client_proto.py ADDED
@@ -0,0 +1,296 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from contextlib import suppress
3
+ from typing import Any, Optional, Tuple
4
+
5
+ from .base_protocol import BaseProtocol
6
+ from .client_exceptions import (
7
+ ClientOSError,
8
+ ClientPayloadError,
9
+ ServerDisconnectedError,
10
+ ServerTimeoutError,
11
+ )
12
+ from .helpers import (
13
+ _EXC_SENTINEL,
14
+ BaseTimerContext,
15
+ set_exception,
16
+ status_code_must_be_empty_body,
17
+ )
18
+ from .http import HttpResponseParser, RawResponseMessage
19
+ from .http_exceptions import HttpProcessingError
20
+ from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
21
+
22
+
23
+ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
24
+ """Helper class to adapt between Protocol and StreamReader."""
25
+
26
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
27
+ BaseProtocol.__init__(self, loop=loop)
28
+ DataQueue.__init__(self, loop)
29
+
30
+ self._should_close = False
31
+
32
+ self._payload: Optional[StreamReader] = None
33
+ self._skip_payload = False
34
+ self._payload_parser = None
35
+
36
+ self._timer = None
37
+
38
+ self._tail = b""
39
+ self._upgraded = False
40
+ self._parser: Optional[HttpResponseParser] = None
41
+
42
+ self._read_timeout: Optional[float] = None
43
+ self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
44
+
45
+ self._timeout_ceil_threshold: Optional[float] = 5
46
+
47
+ @property
48
+ def upgraded(self) -> bool:
49
+ return self._upgraded
50
+
51
+ @property
52
+ def should_close(self) -> bool:
53
+ if self._payload is not None and not self._payload.is_eof() or self._upgraded:
54
+ return True
55
+
56
+ return (
57
+ self._should_close
58
+ or self._upgraded
59
+ or self.exception() is not None
60
+ or self._payload_parser is not None
61
+ or len(self) > 0
62
+ or bool(self._tail)
63
+ )
64
+
65
+ def force_close(self) -> None:
66
+ self._should_close = True
67
+
68
+ def close(self) -> None:
69
+ transport = self.transport
70
+ if transport is not None:
71
+ transport.close()
72
+ self.transport = None
73
+ self._payload = None
74
+ self._drop_timeout()
75
+
76
+ def is_connected(self) -> bool:
77
+ return self.transport is not None and not self.transport.is_closing()
78
+
79
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
80
+ self._drop_timeout()
81
+
82
+ original_connection_error = exc
83
+ reraised_exc = original_connection_error
84
+
85
+ connection_closed_cleanly = original_connection_error is None
86
+
87
+ if self._payload_parser is not None:
88
+ with suppress(Exception): # FIXME: log this somehow?
89
+ self._payload_parser.feed_eof()
90
+
91
+ uncompleted = None
92
+ if self._parser is not None:
93
+ try:
94
+ uncompleted = self._parser.feed_eof()
95
+ except Exception as underlying_exc:
96
+ if self._payload is not None:
97
+ client_payload_exc_msg = (
98
+ f"Response payload is not completed: {underlying_exc !r}"
99
+ )
100
+ if not connection_closed_cleanly:
101
+ client_payload_exc_msg = (
102
+ f"{client_payload_exc_msg !s}. "
103
+ f"{original_connection_error !r}"
104
+ )
105
+ set_exception(
106
+ self._payload,
107
+ ClientPayloadError(client_payload_exc_msg),
108
+ underlying_exc,
109
+ )
110
+
111
+ if not self.is_eof():
112
+ if isinstance(original_connection_error, OSError):
113
+ reraised_exc = ClientOSError(*original_connection_error.args)
114
+ if connection_closed_cleanly:
115
+ reraised_exc = ServerDisconnectedError(uncompleted)
116
+ # assigns self._should_close to True as side effect,
117
+ # we do it anyway below
118
+ underlying_non_eof_exc = (
119
+ _EXC_SENTINEL
120
+ if connection_closed_cleanly
121
+ else original_connection_error
122
+ )
123
+ assert underlying_non_eof_exc is not None
124
+ assert reraised_exc is not None
125
+ self.set_exception(reraised_exc, underlying_non_eof_exc)
126
+
127
+ self._should_close = True
128
+ self._parser = None
129
+ self._payload = None
130
+ self._payload_parser = None
131
+ self._reading_paused = False
132
+
133
+ super().connection_lost(reraised_exc)
134
+
135
+ def eof_received(self) -> None:
136
+ # should call parser.feed_eof() most likely
137
+ self._drop_timeout()
138
+
139
+ def pause_reading(self) -> None:
140
+ super().pause_reading()
141
+ self._drop_timeout()
142
+
143
+ def resume_reading(self) -> None:
144
+ super().resume_reading()
145
+ self._reschedule_timeout()
146
+
147
+ def set_exception(
148
+ self,
149
+ exc: BaseException,
150
+ exc_cause: BaseException = _EXC_SENTINEL,
151
+ ) -> None:
152
+ self._should_close = True
153
+ self._drop_timeout()
154
+ super().set_exception(exc, exc_cause)
155
+
156
+ def set_parser(self, parser: Any, payload: Any) -> None:
157
+ # TODO: actual types are:
158
+ # parser: WebSocketReader
159
+ # payload: FlowControlDataQueue
160
+ # but they are not generi enough
161
+ # Need an ABC for both types
162
+ self._payload = payload
163
+ self._payload_parser = parser
164
+
165
+ self._drop_timeout()
166
+
167
+ if self._tail:
168
+ data, self._tail = self._tail, b""
169
+ self.data_received(data)
170
+
171
+ def set_response_params(
172
+ self,
173
+ *,
174
+ timer: Optional[BaseTimerContext] = None,
175
+ skip_payload: bool = False,
176
+ read_until_eof: bool = False,
177
+ auto_decompress: bool = True,
178
+ read_timeout: Optional[float] = None,
179
+ read_bufsize: int = 2**16,
180
+ timeout_ceil_threshold: float = 5,
181
+ max_line_size: int = 8190,
182
+ max_field_size: int = 8190,
183
+ ) -> None:
184
+ self._skip_payload = skip_payload
185
+
186
+ self._read_timeout = read_timeout
187
+
188
+ self._timeout_ceil_threshold = timeout_ceil_threshold
189
+
190
+ self._parser = HttpResponseParser(
191
+ self,
192
+ self._loop,
193
+ read_bufsize,
194
+ timer=timer,
195
+ payload_exception=ClientPayloadError,
196
+ response_with_body=not skip_payload,
197
+ read_until_eof=read_until_eof,
198
+ auto_decompress=auto_decompress,
199
+ max_line_size=max_line_size,
200
+ max_field_size=max_field_size,
201
+ )
202
+
203
+ if self._tail:
204
+ data, self._tail = self._tail, b""
205
+ self.data_received(data)
206
+
207
+ def _drop_timeout(self) -> None:
208
+ if self._read_timeout_handle is not None:
209
+ self._read_timeout_handle.cancel()
210
+ self._read_timeout_handle = None
211
+
212
+ def _reschedule_timeout(self) -> None:
213
+ timeout = self._read_timeout
214
+ if self._read_timeout_handle is not None:
215
+ self._read_timeout_handle.cancel()
216
+
217
+ if timeout:
218
+ self._read_timeout_handle = self._loop.call_later(
219
+ timeout, self._on_read_timeout
220
+ )
221
+ else:
222
+ self._read_timeout_handle = None
223
+
224
+ def start_timeout(self) -> None:
225
+ self._reschedule_timeout()
226
+
227
+ def _on_read_timeout(self) -> None:
228
+ exc = ServerTimeoutError("Timeout on reading data from socket")
229
+ self.set_exception(exc)
230
+ if self._payload is not None:
231
+ set_exception(self._payload, exc)
232
+
233
+ def data_received(self, data: bytes) -> None:
234
+ self._reschedule_timeout()
235
+
236
+ if not data:
237
+ return
238
+
239
+ # custom payload parser
240
+ if self._payload_parser is not None:
241
+ eof, tail = self._payload_parser.feed_data(data)
242
+ if eof:
243
+ self._payload = None
244
+ self._payload_parser = None
245
+
246
+ if tail:
247
+ self.data_received(tail)
248
+ return
249
+ else:
250
+ if self._upgraded or self._parser is None:
251
+ # i.e. websocket connection, websocket parser is not set yet
252
+ self._tail += data
253
+ else:
254
+ # parse http messages
255
+ try:
256
+ messages, upgraded, tail = self._parser.feed_data(data)
257
+ except BaseException as underlying_exc:
258
+ if self.transport is not None:
259
+ # connection.release() could be called BEFORE
260
+ # data_received(), the transport is already
261
+ # closed in this case
262
+ self.transport.close()
263
+ # should_close is True after the call
264
+ self.set_exception(HttpProcessingError(), underlying_exc)
265
+ return
266
+
267
+ self._upgraded = upgraded
268
+
269
+ payload: Optional[StreamReader] = None
270
+ for message, payload in messages:
271
+ if message.should_close:
272
+ self._should_close = True
273
+
274
+ self._payload = payload
275
+
276
+ if self._skip_payload or status_code_must_be_empty_body(
277
+ message.code
278
+ ):
279
+ self.feed_data((message, EMPTY_PAYLOAD), 0)
280
+ else:
281
+ self.feed_data((message, payload), 0)
282
+ if payload is not None:
283
+ # new message(s) was processed
284
+ # register timeout handler unsubscribing
285
+ # either on end-of-stream or immediately for
286
+ # EMPTY_PAYLOAD
287
+ if payload is not EMPTY_PAYLOAD:
288
+ payload.on_eof(self._drop_timeout)
289
+ else:
290
+ self._drop_timeout()
291
+
292
+ if tail:
293
+ if upgraded:
294
+ self.data_received(tail)
295
+ else:
296
+ self._tail = tail
venv/lib/python3.10/site-packages/aiohttp/client_reqrep.py ADDED
@@ -0,0 +1,1207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import codecs
3
+ import contextlib
4
+ import functools
5
+ import io
6
+ import re
7
+ import sys
8
+ import traceback
9
+ import warnings
10
+ from hashlib import md5, sha1, sha256
11
+ from http.cookies import CookieError, Morsel, SimpleCookie
12
+ from types import MappingProxyType, TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Callable,
17
+ Dict,
18
+ Iterable,
19
+ List,
20
+ Mapping,
21
+ Optional,
22
+ Tuple,
23
+ Type,
24
+ Union,
25
+ cast,
26
+ )
27
+
28
+ import attr
29
+ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
30
+ from yarl import URL
31
+
32
+ from . import hdrs, helpers, http, multipart, payload
33
+ from .abc import AbstractStreamWriter
34
+ from .client_exceptions import (
35
+ ClientConnectionError,
36
+ ClientOSError,
37
+ ClientResponseError,
38
+ ContentTypeError,
39
+ InvalidURL,
40
+ ServerFingerprintMismatch,
41
+ )
42
+ from .compression_utils import HAS_BROTLI
43
+ from .formdata import FormData
44
+ from .helpers import (
45
+ BaseTimerContext,
46
+ BasicAuth,
47
+ HeadersMixin,
48
+ TimerNoop,
49
+ basicauth_from_netrc,
50
+ netrc_from_env,
51
+ noop,
52
+ reify,
53
+ set_exception,
54
+ set_result,
55
+ )
56
+ from .http import (
57
+ SERVER_SOFTWARE,
58
+ HttpVersion,
59
+ HttpVersion10,
60
+ HttpVersion11,
61
+ StreamWriter,
62
+ )
63
+ from .log import client_logger
64
+ from .streams import StreamReader
65
+ from .typedefs import (
66
+ DEFAULT_JSON_DECODER,
67
+ JSONDecoder,
68
+ LooseCookies,
69
+ LooseHeaders,
70
+ RawHeaders,
71
+ )
72
+
73
+ try:
74
+ import ssl
75
+ from ssl import SSLContext
76
+ except ImportError: # pragma: no cover
77
+ ssl = None # type: ignore[assignment]
78
+ SSLContext = object # type: ignore[misc,assignment]
79
+
80
+
81
+ __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
82
+
83
+
84
+ if TYPE_CHECKING:
85
+ from .client import ClientSession
86
+ from .connector import Connection
87
+ from .tracing import Trace
88
+
89
+
90
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
91
+ json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
92
+
93
+
94
+ def _gen_default_accept_encoding() -> str:
95
+ return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate"
96
+
97
+
98
+ @attr.s(auto_attribs=True, frozen=True, slots=True)
99
+ class ContentDisposition:
100
+ type: Optional[str]
101
+ parameters: "MappingProxyType[str, str]"
102
+ filename: Optional[str]
103
+
104
+
105
+ @attr.s(auto_attribs=True, frozen=True, slots=True)
106
+ class RequestInfo:
107
+ url: URL
108
+ method: str
109
+ headers: "CIMultiDictProxy[str]"
110
+ real_url: URL = attr.ib()
111
+
112
+ @real_url.default
113
+ def real_url_default(self) -> URL:
114
+ return self.url
115
+
116
+
117
+ class Fingerprint:
118
+ HASHFUNC_BY_DIGESTLEN = {
119
+ 16: md5,
120
+ 20: sha1,
121
+ 32: sha256,
122
+ }
123
+
124
+ def __init__(self, fingerprint: bytes) -> None:
125
+ digestlen = len(fingerprint)
126
+ hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
127
+ if not hashfunc:
128
+ raise ValueError("fingerprint has invalid length")
129
+ elif hashfunc is md5 or hashfunc is sha1:
130
+ raise ValueError(
131
+ "md5 and sha1 are insecure and " "not supported. Use sha256."
132
+ )
133
+ self._hashfunc = hashfunc
134
+ self._fingerprint = fingerprint
135
+
136
+ @property
137
+ def fingerprint(self) -> bytes:
138
+ return self._fingerprint
139
+
140
+ def check(self, transport: asyncio.Transport) -> None:
141
+ if not transport.get_extra_info("sslcontext"):
142
+ return
143
+ sslobj = transport.get_extra_info("ssl_object")
144
+ cert = sslobj.getpeercert(binary_form=True)
145
+ got = self._hashfunc(cert).digest()
146
+ if got != self._fingerprint:
147
+ host, port, *_ = transport.get_extra_info("peername")
148
+ raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
149
+
150
+
151
+ if ssl is not None:
152
+ SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
153
+ else: # pragma: no cover
154
+ SSL_ALLOWED_TYPES = (bool, type(None))
155
+
156
+
157
+ def _merge_ssl_params(
158
+ ssl: Union["SSLContext", bool, Fingerprint],
159
+ verify_ssl: Optional[bool],
160
+ ssl_context: Optional["SSLContext"],
161
+ fingerprint: Optional[bytes],
162
+ ) -> Union["SSLContext", bool, Fingerprint]:
163
+ if ssl is None:
164
+ ssl = True # Double check for backwards compatibility
165
+ if verify_ssl is not None and not verify_ssl:
166
+ warnings.warn(
167
+ "verify_ssl is deprecated, use ssl=False instead",
168
+ DeprecationWarning,
169
+ stacklevel=3,
170
+ )
171
+ if ssl is not True:
172
+ raise ValueError(
173
+ "verify_ssl, ssl_context, fingerprint and ssl "
174
+ "parameters are mutually exclusive"
175
+ )
176
+ else:
177
+ ssl = False
178
+ if ssl_context is not None:
179
+ warnings.warn(
180
+ "ssl_context is deprecated, use ssl=context instead",
181
+ DeprecationWarning,
182
+ stacklevel=3,
183
+ )
184
+ if ssl is not True:
185
+ raise ValueError(
186
+ "verify_ssl, ssl_context, fingerprint and ssl "
187
+ "parameters are mutually exclusive"
188
+ )
189
+ else:
190
+ ssl = ssl_context
191
+ if fingerprint is not None:
192
+ warnings.warn(
193
+ "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
194
+ DeprecationWarning,
195
+ stacklevel=3,
196
+ )
197
+ if ssl is not True:
198
+ raise ValueError(
199
+ "verify_ssl, ssl_context, fingerprint and ssl "
200
+ "parameters are mutually exclusive"
201
+ )
202
+ else:
203
+ ssl = Fingerprint(fingerprint)
204
+ if not isinstance(ssl, SSL_ALLOWED_TYPES):
205
+ raise TypeError(
206
+ "ssl should be SSLContext, bool, Fingerprint or None, "
207
+ "got {!r} instead.".format(ssl)
208
+ )
209
+ return ssl
210
+
211
+
212
+ @attr.s(auto_attribs=True, slots=True, frozen=True)
213
+ class ConnectionKey:
214
+ # the key should contain an information about used proxy / TLS
215
+ # to prevent reusing wrong connections from a pool
216
+ host: str
217
+ port: Optional[int]
218
+ is_ssl: bool
219
+ ssl: Union[SSLContext, bool, Fingerprint]
220
+ proxy: Optional[URL]
221
+ proxy_auth: Optional[BasicAuth]
222
+ proxy_headers_hash: Optional[int] # hash(CIMultiDict)
223
+
224
+
225
+ def _is_expected_content_type(
226
+ response_content_type: str, expected_content_type: str
227
+ ) -> bool:
228
+ if expected_content_type == "application/json":
229
+ return json_re.match(response_content_type) is not None
230
+ return expected_content_type in response_content_type
231
+
232
+
233
+ class ClientRequest:
234
+ GET_METHODS = {
235
+ hdrs.METH_GET,
236
+ hdrs.METH_HEAD,
237
+ hdrs.METH_OPTIONS,
238
+ hdrs.METH_TRACE,
239
+ }
240
+ POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
241
+ ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
242
+
243
+ DEFAULT_HEADERS = {
244
+ hdrs.ACCEPT: "*/*",
245
+ hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
246
+ }
247
+
248
+ body = b""
249
+ auth = None
250
+ response = None
251
+
252
+ __writer = None # async task for streaming data
253
+ _continue = None # waiter future for '100 Continue' response
254
+
255
+ # N.B.
256
+ # Adding __del__ method with self._writer closing doesn't make sense
257
+ # because _writer is instance method, thus it keeps a reference to self.
258
+ # Until writer has finished finalizer will not be called.
259
+
260
+ def __init__(
261
+ self,
262
+ method: str,
263
+ url: URL,
264
+ *,
265
+ params: Optional[Mapping[str, str]] = None,
266
+ headers: Optional[LooseHeaders] = None,
267
+ skip_auto_headers: Iterable[str] = frozenset(),
268
+ data: Any = None,
269
+ cookies: Optional[LooseCookies] = None,
270
+ auth: Optional[BasicAuth] = None,
271
+ version: http.HttpVersion = http.HttpVersion11,
272
+ compress: Optional[str] = None,
273
+ chunked: Optional[bool] = None,
274
+ expect100: bool = False,
275
+ loop: Optional[asyncio.AbstractEventLoop] = None,
276
+ response_class: Optional[Type["ClientResponse"]] = None,
277
+ proxy: Optional[URL] = None,
278
+ proxy_auth: Optional[BasicAuth] = None,
279
+ timer: Optional[BaseTimerContext] = None,
280
+ session: Optional["ClientSession"] = None,
281
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
282
+ proxy_headers: Optional[LooseHeaders] = None,
283
+ traces: Optional[List["Trace"]] = None,
284
+ trust_env: bool = False,
285
+ server_hostname: Optional[str] = None,
286
+ ):
287
+ if loop is None:
288
+ loop = asyncio.get_event_loop()
289
+
290
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
291
+ if match:
292
+ raise ValueError(
293
+ f"Method cannot contain non-token characters {method!r} "
294
+ "(found at least {match.group()!r})"
295
+ )
296
+
297
+ assert isinstance(url, URL), url
298
+ assert isinstance(proxy, (URL, type(None))), proxy
299
+ # FIXME: session is None in tests only, need to fix tests
300
+ # assert session is not None
301
+ self._session = cast("ClientSession", session)
302
+ if params:
303
+ q = MultiDict(url.query)
304
+ url2 = url.with_query(params)
305
+ q.extend(url2.query)
306
+ url = url.with_query(q)
307
+ self.original_url = url
308
+ self.url = url.with_fragment(None)
309
+ self.method = method.upper()
310
+ self.chunked = chunked
311
+ self.compress = compress
312
+ self.loop = loop
313
+ self.length = None
314
+ if response_class is None:
315
+ real_response_class = ClientResponse
316
+ else:
317
+ real_response_class = response_class
318
+ self.response_class: Type[ClientResponse] = real_response_class
319
+ self._timer = timer if timer is not None else TimerNoop()
320
+ self._ssl = ssl if ssl is not None else True
321
+ self.server_hostname = server_hostname
322
+
323
+ if loop.get_debug():
324
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
325
+
326
+ self.update_version(version)
327
+ self.update_host(url)
328
+ self.update_headers(headers)
329
+ self.update_auto_headers(skip_auto_headers)
330
+ self.update_cookies(cookies)
331
+ self.update_content_encoding(data)
332
+ self.update_auth(auth, trust_env)
333
+ self.update_proxy(proxy, proxy_auth, proxy_headers)
334
+
335
+ self.update_body_from_data(data)
336
+ if data is not None or self.method not in self.GET_METHODS:
337
+ self.update_transfer_encoding()
338
+ self.update_expect_continue(expect100)
339
+ if traces is None:
340
+ traces = []
341
+ self._traces = traces
342
+
343
+ def __reset_writer(self, _: object = None) -> None:
344
+ self.__writer = None
345
+
346
+ @property
347
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
348
+ return self.__writer
349
+
350
+ @_writer.setter
351
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
352
+ if self.__writer is not None:
353
+ self.__writer.remove_done_callback(self.__reset_writer)
354
+ self.__writer = writer
355
+ if writer is not None:
356
+ writer.add_done_callback(self.__reset_writer)
357
+
358
+ def is_ssl(self) -> bool:
359
+ return self.url.scheme in ("https", "wss")
360
+
361
+ @property
362
+ def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
363
+ return self._ssl
364
+
365
+ @property
366
+ def connection_key(self) -> ConnectionKey:
367
+ proxy_headers = self.proxy_headers
368
+ if proxy_headers:
369
+ h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
370
+ else:
371
+ h = None
372
+ return ConnectionKey(
373
+ self.host,
374
+ self.port,
375
+ self.is_ssl(),
376
+ self.ssl,
377
+ self.proxy,
378
+ self.proxy_auth,
379
+ h,
380
+ )
381
+
382
+ @property
383
+ def host(self) -> str:
384
+ ret = self.url.raw_host
385
+ assert ret is not None
386
+ return ret
387
+
388
+ @property
389
+ def port(self) -> Optional[int]:
390
+ return self.url.port
391
+
392
+ @property
393
+ def request_info(self) -> RequestInfo:
394
+ headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
395
+ return RequestInfo(self.url, self.method, headers, self.original_url)
396
+
397
+ def update_host(self, url: URL) -> None:
398
+ """Update destination host, port and connection type (ssl)."""
399
+ # get host/port
400
+ if not url.raw_host:
401
+ raise InvalidURL(url)
402
+
403
+ # basic auth info
404
+ username, password = url.user, url.password
405
+ if username:
406
+ self.auth = helpers.BasicAuth(username, password or "")
407
+
408
+ def update_version(self, version: Union[http.HttpVersion, str]) -> None:
409
+ """Convert request version to two elements tuple.
410
+
411
+ parser HTTP version '1.1' => (1, 1)
412
+ """
413
+ if isinstance(version, str):
414
+ v = [part.strip() for part in version.split(".", 1)]
415
+ try:
416
+ version = http.HttpVersion(int(v[0]), int(v[1]))
417
+ except ValueError:
418
+ raise ValueError(
419
+ f"Can not parse http version number: {version}"
420
+ ) from None
421
+ self.version = version
422
+
423
+ def update_headers(self, headers: Optional[LooseHeaders]) -> None:
424
+ """Update request headers."""
425
+ self.headers: CIMultiDict[str] = CIMultiDict()
426
+
427
+ # add host
428
+ netloc = cast(str, self.url.raw_host)
429
+ if helpers.is_ipv6_address(netloc):
430
+ netloc = f"[{netloc}]"
431
+ # See https://github.com/aio-libs/aiohttp/issues/3636.
432
+ netloc = netloc.rstrip(".")
433
+ if self.url.port is not None and not self.url.is_default_port():
434
+ netloc += ":" + str(self.url.port)
435
+ self.headers[hdrs.HOST] = netloc
436
+
437
+ if headers:
438
+ if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
439
+ headers = headers.items() # type: ignore[assignment]
440
+
441
+ for key, value in headers: # type: ignore[misc]
442
+ # A special case for Host header
443
+ if key.lower() == "host":
444
+ self.headers[key] = value
445
+ else:
446
+ self.headers.add(key, value)
447
+
448
+ def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
449
+ self.skip_auto_headers = CIMultiDict(
450
+ (hdr, None) for hdr in sorted(skip_auto_headers)
451
+ )
452
+ used_headers = self.headers.copy()
453
+ used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type]
454
+
455
+ for hdr, val in self.DEFAULT_HEADERS.items():
456
+ if hdr not in used_headers:
457
+ self.headers.add(hdr, val)
458
+
459
+ if hdrs.USER_AGENT not in used_headers:
460
+ self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
461
+
462
+ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
463
+ """Update request cookies header."""
464
+ if not cookies:
465
+ return
466
+
467
+ c = SimpleCookie()
468
+ if hdrs.COOKIE in self.headers:
469
+ c.load(self.headers.get(hdrs.COOKIE, ""))
470
+ del self.headers[hdrs.COOKIE]
471
+
472
+ if isinstance(cookies, Mapping):
473
+ iter_cookies = cookies.items()
474
+ else:
475
+ iter_cookies = cookies # type: ignore[assignment]
476
+ for name, value in iter_cookies:
477
+ if isinstance(value, Morsel):
478
+ # Preserve coded_value
479
+ mrsl_val = value.get(value.key, Morsel())
480
+ mrsl_val.set(value.key, value.value, value.coded_value)
481
+ c[name] = mrsl_val
482
+ else:
483
+ c[name] = value # type: ignore[assignment]
484
+
485
+ self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
486
+
487
+ def update_content_encoding(self, data: Any) -> None:
488
+ """Set request content encoding."""
489
+ if data is None:
490
+ return
491
+
492
+ enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
493
+ if enc:
494
+ if self.compress:
495
+ raise ValueError(
496
+ "compress can not be set " "if Content-Encoding header is set"
497
+ )
498
+ elif self.compress:
499
+ if not isinstance(self.compress, str):
500
+ self.compress = "deflate"
501
+ self.headers[hdrs.CONTENT_ENCODING] = self.compress
502
+ self.chunked = True # enable chunked, no need to deal with length
503
+
504
+ def update_transfer_encoding(self) -> None:
505
+ """Analyze transfer-encoding header."""
506
+ te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
507
+
508
+ if "chunked" in te:
509
+ if self.chunked:
510
+ raise ValueError(
511
+ "chunked can not be set "
512
+ 'if "Transfer-Encoding: chunked" header is set'
513
+ )
514
+
515
+ elif self.chunked:
516
+ if hdrs.CONTENT_LENGTH in self.headers:
517
+ raise ValueError(
518
+ "chunked can not be set " "if Content-Length header is set"
519
+ )
520
+
521
+ self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
522
+ else:
523
+ if hdrs.CONTENT_LENGTH not in self.headers:
524
+ self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
525
+
526
+ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
527
+ """Set basic auth."""
528
+ if auth is None:
529
+ auth = self.auth
530
+ if auth is None and trust_env and self.url.host is not None:
531
+ netrc_obj = netrc_from_env()
532
+ with contextlib.suppress(LookupError):
533
+ auth = basicauth_from_netrc(netrc_obj, self.url.host)
534
+ if auth is None:
535
+ return
536
+
537
+ if not isinstance(auth, helpers.BasicAuth):
538
+ raise TypeError("BasicAuth() tuple is required instead")
539
+
540
+ self.headers[hdrs.AUTHORIZATION] = auth.encode()
541
+
542
+ def update_body_from_data(self, body: Any) -> None:
543
+ if body is None:
544
+ return
545
+
546
+ # FormData
547
+ if isinstance(body, FormData):
548
+ body = body()
549
+
550
+ try:
551
+ body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
552
+ except payload.LookupError:
553
+ body = FormData(body)()
554
+
555
+ self.body = body
556
+
557
+ # enable chunked encoding if needed
558
+ if not self.chunked:
559
+ if hdrs.CONTENT_LENGTH not in self.headers:
560
+ size = body.size
561
+ if size is None:
562
+ self.chunked = True
563
+ else:
564
+ if hdrs.CONTENT_LENGTH not in self.headers:
565
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
566
+
567
+ # copy payload headers
568
+ assert body.headers
569
+ for (key, value) in body.headers.items():
570
+ if key in self.headers:
571
+ continue
572
+ if key in self.skip_auto_headers:
573
+ continue
574
+ self.headers[key] = value
575
+
576
+ def update_expect_continue(self, expect: bool = False) -> None:
577
+ if expect:
578
+ self.headers[hdrs.EXPECT] = "100-continue"
579
+ elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
580
+ expect = True
581
+
582
+ if expect:
583
+ self._continue = self.loop.create_future()
584
+
585
+ def update_proxy(
586
+ self,
587
+ proxy: Optional[URL],
588
+ proxy_auth: Optional[BasicAuth],
589
+ proxy_headers: Optional[LooseHeaders],
590
+ ) -> None:
591
+ if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
592
+ raise ValueError("proxy_auth must be None or BasicAuth() tuple")
593
+ self.proxy = proxy
594
+ self.proxy_auth = proxy_auth
595
+ self.proxy_headers = proxy_headers
596
+
597
+ def keep_alive(self) -> bool:
598
+ if self.version < HttpVersion10:
599
+ # keep alive not supported at all
600
+ return False
601
+ if self.version == HttpVersion10:
602
+ if self.headers.get(hdrs.CONNECTION) == "keep-alive":
603
+ return True
604
+ else: # no headers means we close for Http 1.0
605
+ return False
606
+ elif self.headers.get(hdrs.CONNECTION) == "close":
607
+ return False
608
+
609
+ return True
610
+
611
+ async def write_bytes(
612
+ self, writer: AbstractStreamWriter, conn: "Connection"
613
+ ) -> None:
614
+ """Support coroutines that yields bytes objects."""
615
+ # 100 response
616
+ if self._continue is not None:
617
+ try:
618
+ await writer.drain()
619
+ await self._continue
620
+ except asyncio.CancelledError:
621
+ return
622
+
623
+ protocol = conn.protocol
624
+ assert protocol is not None
625
+ try:
626
+ if isinstance(self.body, payload.Payload):
627
+ await self.body.write(writer)
628
+ else:
629
+ if isinstance(self.body, (bytes, bytearray)):
630
+ self.body = (self.body,) # type: ignore[assignment]
631
+
632
+ for chunk in self.body:
633
+ await writer.write(chunk) # type: ignore[arg-type]
634
+ except OSError as underlying_exc:
635
+ reraised_exc = underlying_exc
636
+
637
+ exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
638
+ underlying_exc, asyncio.TimeoutError
639
+ )
640
+ if exc_is_not_timeout:
641
+ reraised_exc = ClientOSError(
642
+ underlying_exc.errno,
643
+ f"Can not write request body for {self.url !s}",
644
+ )
645
+
646
+ set_exception(protocol, reraised_exc, underlying_exc)
647
+ except asyncio.CancelledError:
648
+ await writer.write_eof()
649
+ except Exception as underlying_exc:
650
+ set_exception(
651
+ protocol,
652
+ ClientConnectionError(
653
+ f"Failed to send bytes into the underlying connection {conn !s}",
654
+ ),
655
+ underlying_exc,
656
+ )
657
+ else:
658
+ await writer.write_eof()
659
+ protocol.start_timeout()
660
+
661
+ async def send(self, conn: "Connection") -> "ClientResponse":
662
+ # Specify request target:
663
+ # - CONNECT request must send authority form URI
664
+ # - not CONNECT proxy must send absolute form URI
665
+ # - most common is origin form URI
666
+ if self.method == hdrs.METH_CONNECT:
667
+ connect_host = self.url.raw_host
668
+ assert connect_host is not None
669
+ if helpers.is_ipv6_address(connect_host):
670
+ connect_host = f"[{connect_host}]"
671
+ path = f"{connect_host}:{self.url.port}"
672
+ elif self.proxy and not self.is_ssl():
673
+ path = str(self.url)
674
+ else:
675
+ path = self.url.raw_path
676
+ if self.url.raw_query_string:
677
+ path += "?" + self.url.raw_query_string
678
+
679
+ protocol = conn.protocol
680
+ assert protocol is not None
681
+ writer = StreamWriter(
682
+ protocol,
683
+ self.loop,
684
+ on_chunk_sent=functools.partial(
685
+ self._on_chunk_request_sent, self.method, self.url
686
+ ),
687
+ on_headers_sent=functools.partial(
688
+ self._on_headers_request_sent, self.method, self.url
689
+ ),
690
+ )
691
+
692
+ if self.compress:
693
+ writer.enable_compression(self.compress)
694
+
695
+ if self.chunked is not None:
696
+ writer.enable_chunking()
697
+
698
+ # set default content-type
699
+ if (
700
+ self.method in self.POST_METHODS
701
+ and hdrs.CONTENT_TYPE not in self.skip_auto_headers
702
+ and hdrs.CONTENT_TYPE not in self.headers
703
+ ):
704
+ self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
705
+
706
+ # set the connection header
707
+ connection = self.headers.get(hdrs.CONNECTION)
708
+ if not connection:
709
+ if self.keep_alive():
710
+ if self.version == HttpVersion10:
711
+ connection = "keep-alive"
712
+ else:
713
+ if self.version == HttpVersion11:
714
+ connection = "close"
715
+
716
+ if connection is not None:
717
+ self.headers[hdrs.CONNECTION] = connection
718
+
719
+ # status + headers
720
+ status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format(
721
+ self.method, path, v=self.version
722
+ )
723
+ await writer.write_headers(status_line, self.headers)
724
+
725
+ self._writer = self.loop.create_task(self.write_bytes(writer, conn))
726
+
727
+ response_class = self.response_class
728
+ assert response_class is not None
729
+ self.response = response_class(
730
+ self.method,
731
+ self.original_url,
732
+ writer=self._writer,
733
+ continue100=self._continue,
734
+ timer=self._timer,
735
+ request_info=self.request_info,
736
+ traces=self._traces,
737
+ loop=self.loop,
738
+ session=self._session,
739
+ )
740
+ return self.response
741
+
742
+ async def close(self) -> None:
743
+ if self._writer is not None:
744
+ with contextlib.suppress(asyncio.CancelledError):
745
+ await self._writer
746
+
747
+ def terminate(self) -> None:
748
+ if self._writer is not None:
749
+ if not self.loop.is_closed():
750
+ self._writer.cancel()
751
+ self._writer.remove_done_callback(self.__reset_writer)
752
+ self._writer = None
753
+
754
+ async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
755
+ for trace in self._traces:
756
+ await trace.send_request_chunk_sent(method, url, chunk)
757
+
758
+ async def _on_headers_request_sent(
759
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
760
+ ) -> None:
761
+ for trace in self._traces:
762
+ await trace.send_request_headers(method, url, headers)
763
+
764
+
765
class ClientResponse(HeadersMixin):
    """HTTP response returned by the client.

    Most attributes are ``None`` right after construction and are filled
    in by :meth:`start` once the status line and headers have been read
    from the connection.
    """

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase

    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _raw_headers: RawHeaders = None  # type: ignore[assignment]

    _connection = None  # current connection
    _source_traceback: Optional[traceback.StackSummary] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    __writer = None

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "asyncio.Task[None]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        assert isinstance(url, URL)

        self.method = method
        self.cookies = SimpleCookie()

        # Keep the original URL (with fragment) separately from the one
        # actually used for the request.
        self._real_url = url
        self._url = url.with_fragment(None)
        self._body: Any = None
        self._writer: Optional[asyncio.Task[None]] = writer
        self._continue = continue100  # None by default
        self._closed = True
        self._history: Tuple[ClientResponse, ...] = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache: Dict[str, Any] = {}
        self._traces = traces
        self._loop = loop
        # store a reference to session #1985
        self._session: Optional[ClientSession] = session
        # Save reference to _resolve_charset, so that get_encoding() will still
        # work after the response has finished reading the body.
        if session is None:
            # TODO: Fix session=None in tests (see ClientRequest.__init__).
            self._resolve_charset: Callable[
                ["ClientResponse", bytes], str
            ] = lambda *_: "utf-8"
        else:
            self._resolve_charset = session._resolve_charset
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __reset_writer(self, _: object = None) -> None:
        # Done-callback: drop the writer reference once the task finishes.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The body-writer task, or ``None`` once it has completed."""
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Swap the done-callback from the old writer (if any) to the new one
        # so __writer is always cleared automatically on completion.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is not None:
            writer.add_done_callback(self.__reset_writer)

    @reify
    def url(self) -> URL:
        """Request URL with the fragment stripped."""
        return self._url

    @reify
    def url_obj(self) -> URL:
        """Deprecated alias of :attr:`url`."""
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @reify
    def real_url(self) -> URL:
        """Request URL as given, including any fragment."""
        return self._real_url

    @reify
    def host(self) -> str:
        """Host part of the request URL."""
        assert self._url.host is not None
        return self._url.host

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        """Case-insensitive, read-only view of the response headers."""
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        """Response headers as unparsed (name, value) byte pairs."""
        return self._raw_headers

    @reify
    def request_info(self) -> RequestInfo:
        """Information about the request that produced this response."""
        return self._request_info

    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        """Parsed Content-Disposition header, or ``None`` if absent."""
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings: Any = warnings) -> None:
        # Best-effort cleanup and a ResourceWarning for responses the user
        # forgot to close/release.
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                kwargs = {"source": self}
                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
                context = {"client_response": self, "message": "Unclosed response"}
                if self._source_traceback:
                    context["source_traceback"] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self) -> str:
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            # Reason phrases may contain non-ASCII bytes; escape them.
            ascii_encodable_reason = self.reason.encode(
                "ascii", "backslashreplace"
            ).decode("ascii")
        else:
            ascii_encodable_reason = "None"
        print(
            "<ClientResponse({}) [{} {}]>".format(
                ascii_encodable_url, self.status, ascii_encodable_reason
            ),
            file=out,
        )
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self) -> Optional["Connection"]:
        """The connection currently bound to this response, if any."""
        return self._connection

    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history

    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        """Parsed ``Link`` headers keyed by ``rel`` (or by target URL)."""
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

        # Split on commas that precede a "<...>" target.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link: MultiDict[Union[str, URL]] = MultiDict()

            for param in params:
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)

            link.add("url", self.url.join(URL(url)))

            links.add(str(key), MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                # Skip 1xx interim responses (except 101 Switching Protocols).
                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                if self._continue is not None:
                    # Wake up the writer waiting for "100 Continue".
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning("Can not load response cookies: %s", exc)
        return self

    def _response_eof(self) -> None:
        # Called when the payload stream hits EOF: close the response and
        # hand the connection back to the pool (unless it was upgraded).
        if self._closed:
            return

        # protocol could be None because connection could be detached
        protocol = self._connection and self._connection.protocol
        if protocol is not None and protocol.upgraded:
            return

        self._closed = True
        self._cleanup_writer()
        self._release_connection()

    @property
    def closed(self) -> bool:
        """``True`` once the response has been fully closed/released."""
        return self._closed

    def close(self) -> None:
        """Hard-close the response and its underlying connection."""
        if not self._released:
            self._notify_content()

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        self._cleanup_writer()
        if self._connection is not None:
            self._connection.close()
            self._connection = None

    def release(self) -> Any:
        """Release the connection back to the pool without closing it."""
        if not self._released:
            self._notify_content()

        self._closed = True

        self._cleanup_writer()
        self._release_connection()
        return noop()

    @property
    def ok(self) -> bool:
        """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

        This is **not** a check for ``200 OK`` but a check that the response
        status is under 400.
        """
        return 400 > self.status

    def raise_for_status(self) -> None:
        """Raise :exc:`ClientResponseError` if the status is 400 or higher."""
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None
            self.release()
            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )

    def _release_connection(self) -> None:
        # Release immediately if the writer is done; otherwise defer the
        # release until the writer task completes.
        if self._connection is not None:
            if self._writer is None:
                self._connection.release()
                self._connection = None
            else:
                self._writer.add_done_callback(lambda f: self._release_connection())

    async def _wait_released(self) -> None:
        # Await writer completion, then release the connection.
        if self._writer is not None:
            await self._writer
        self._release_connection()

    def _cleanup_writer(self) -> None:
        # Cancel a still-running writer and drop the session reference.
        if self._writer is not None:
            self._writer.cancel()
        self._session = None

    def _notify_content(self) -> None:
        # Mark remaining payload as unreadable so late readers fail fast.
        content = self.content
        if content and content.exception() is None:
            set_exception(content, ClientConnectionError("Connection closed"))
        self._released = True

    async def wait_for_close(self) -> None:
        """Wait for the writer to finish, then release the response."""
        if self._writer is not None:
            await self._writer
        self.release()

    async def read(self) -> bytes:
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                self.close()
                raise
        elif self._released:  # Response explicitly released
            raise ClientConnectionError("Connection closed")

        protocol = self._connection and self._connection.protocol
        if protocol is None or not protocol.upgraded:
            await self._wait_released()  # Underlying connection released
        return self._body  # type: ignore[no-any-return]

    def get_encoding(self) -> str:
        """Determine the payload's text encoding from headers or content."""
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            with contextlib.suppress(LookupError):
                return codecs.lookup(encoding).name

        if mimetype.type == "application" and (
            mimetype.subtype == "json" or mimetype.subtype == "rdap"
        ):
            # RFC 7159 states that the default encoding is UTF-8.
            # RFC 7483 defines application/rdap+json
            return "utf-8"

        if self._body is None:
            raise RuntimeError(
                "Cannot compute fallback encoding of a not yet read body"
            )

        return self._resolve_charset(self, self._body)

    async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(  # type: ignore[no-any-return,union-attr]
            encoding, errors=errors
        )

    async def json(
        self,
        *,
        encoding: Optional[str] = None,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Read and decodes JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=(
                        "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                    ),
                    headers=self.headers,
                )

        stripped = self._body.strip()  # type: ignore[union-attr]
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self) -> "ClientResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # similar to _RequestContextManager, we do not need to check
        # for exceptions, response object can close connection
        # if state is broken
        self.release()
        await self.wait_for_close()
venv/lib/python3.10/site-packages/aiohttp/client_ws.py ADDED
@@ -0,0 +1,315 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket client for asyncio."""
2
+
3
+ import asyncio
4
+ import sys
5
+ from typing import Any, Optional, cast
6
+
7
+ from .client_exceptions import ClientError
8
+ from .client_reqrep import ClientResponse
9
+ from .helpers import call_later, set_result
10
+ from .http import (
11
+ WS_CLOSED_MESSAGE,
12
+ WS_CLOSING_MESSAGE,
13
+ WebSocketError,
14
+ WSCloseCode,
15
+ WSMessage,
16
+ WSMsgType,
17
+ )
18
+ from .http_websocket import WebSocketWriter # WSMessage
19
+ from .streams import EofStream, FlowControlDataQueue
20
+ from .typedefs import (
21
+ DEFAULT_JSON_DECODER,
22
+ DEFAULT_JSON_ENCODER,
23
+ JSONDecoder,
24
+ JSONEncoder,
25
+ )
26
+
27
+ if sys.version_info >= (3, 11):
28
+ import asyncio as async_timeout
29
+ else:
30
+ import async_timeout
31
+
32
+
33
class ClientWebSocketResponse:
    """Client-side WebSocket connection.

    Wraps the reader/writer pair produced by the HTTP upgrade handshake
    and implements ping/pong heartbeats, auto-close handling, and the
    message receive loop.
    """

    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            # A pong must arrive within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: Optional[asyncio.Future[bool]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        # Stop both pending heartbeat timers, if armed.
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        # Re-arm the heartbeat timer; called after every received message.
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat,
                self._heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received,
                self._pong_heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _pong_not_received(self) -> None:
        # Heartbeat pong never arrived: treat the peer as gone.
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        """``True`` once the WebSocket has been closed."""
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        """Close code received from the peer, or ``None`` while open."""
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        """Negotiated WebSocket subprotocol, if any."""
        return self._protocol

    @property
    def compress(self) -> int:
        """Negotiated permessage-deflate window size, 0 when disabled."""
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        """Whether client_no_context_takeover was negotiated."""
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        """Last exception seen on the connection, if any."""
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        """Send a PING frame with the optional payload."""
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        """Send a PONG frame with the optional payload."""
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        """Send a TEXT frame; *data* must be ``str``."""
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        """Send a BINARY frame; *data* must be bytes-like."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        """Serialize *data* with *dumps* and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Perform the closing handshake; return ``True`` if we closed it."""
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closing:
            self._closing = True
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._close_code:
                self._response.close()
                return True

            # Drain frames until the peer's CLOSE arrives (bounded by
            # self._timeout per read).
            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, handling control frames in-loop."""
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    # Always wake up a concurrent close() waiting on us.
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive the next message and require it to be TEXT."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive the next message and require it to be BINARY."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        """Receive a TEXT message and decode it with *loads*."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        # Iteration stops on any close-related message.
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
venv/lib/python3.10/site-packages/aiohttp/cookiejar.py ADDED
@@ -0,0 +1,419 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import calendar
3
+ import contextlib
4
+ import datetime
5
+ import os # noqa
6
+ import pathlib
7
+ import pickle
8
+ import re
9
+ import time
10
+ from collections import defaultdict
11
+ from http.cookies import BaseCookie, Morsel, SimpleCookie
12
+ from math import ceil
13
+ from typing import ( # noqa
14
+ DefaultDict,
15
+ Dict,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ Mapping,
20
+ Optional,
21
+ Set,
22
+ Tuple,
23
+ Union,
24
+ cast,
25
+ )
26
+
27
+ from yarl import URL
28
+
29
+ from .abc import AbstractCookieJar, ClearCookiePredicate
30
+ from .helpers import is_ip_address
31
+ from .typedefs import LooseCookies, PathLike, StrOrURL
32
+
33
+ __all__ = ("CookieJar", "DummyCookieJar")
34
+
35
+
36
+ CookieItem = Union[str, "Morsel[str]"]
37
+
38
+
39
class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    # Tokenizer for the cookie-date grammar of RFC 6265 section 5.1.1:
    # skip delimiter characters, capture each non-delimiter token.
    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    # hh:mm:ss time-of-day token
    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    # one- or two-digit day of month
    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    # three-letter English month abbreviation, case-insensitive
    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    # two- or four-digit year
    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    # calendar.timegm() fails for timestamps after datetime.datetime.max
    # Minus one as a loss of precision occurs when timestamp() is called.
    MAX_TIME = (
        int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
    )
    try:
        calendar.timegm(time.gmtime(MAX_TIME))
    except (OSError, ValueError):
        # Hit the maximum representable time on Windows
        # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
        # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere
        MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
    except OverflowError:
        # #4515: datetime.max may not be representable on 32-bit platforms
        MAX_TIME = 2**31 - 1
    # Avoid minuses in the future, 3x faster
    SUB_MAX_TIME = MAX_TIME - 1
75
+
76
+ def __init__(
77
+ self,
78
+ *,
79
+ unsafe: bool = False,
80
+ quote_cookie: bool = True,
81
+ treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
82
+ loop: Optional[asyncio.AbstractEventLoop] = None,
83
+ ) -> None:
84
+ super().__init__(loop=loop)
85
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
86
+ SimpleCookie
87
+ )
88
+ self._host_only_cookies: Set[Tuple[str, str]] = set()
89
+ self._unsafe = unsafe
90
+ self._quote_cookie = quote_cookie
91
+ if treat_as_secure_origin is None:
92
+ treat_as_secure_origin = []
93
+ elif isinstance(treat_as_secure_origin, URL):
94
+ treat_as_secure_origin = [treat_as_secure_origin.origin()]
95
+ elif isinstance(treat_as_secure_origin, str):
96
+ treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
97
+ else:
98
+ treat_as_secure_origin = [
99
+ URL(url).origin() if isinstance(url, str) else url.origin()
100
+ for url in treat_as_secure_origin
101
+ ]
102
+ self._treat_as_secure_origin = treat_as_secure_origin
103
+ self._next_expiration: float = ceil(time.time())
104
+ self._expirations: Dict[Tuple[str, str, str], float] = {}
105
+
106
+ def save(self, file_path: PathLike) -> None:
107
+ file_path = pathlib.Path(file_path)
108
+ with file_path.open(mode="wb") as f:
109
+ pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
110
+
111
+ def load(self, file_path: PathLike) -> None:
112
+ file_path = pathlib.Path(file_path)
113
+ with file_path.open(mode="rb") as f:
114
+ self._cookies = pickle.load(f)
115
+
116
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Remove cookies matching *predicate*, plus any already expired.

        With no predicate, the whole jar is emptied in one shot.
        """
        if predicate is None:
            # Fast path: drop everything and reset the expiration clock.
            self._next_expiration = ceil(time.time())
            self._cookies.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        to_del = []
        now = time.time()
        for (domain, path), cookie in self._cookies.items():
            for name, morsel in cookie.items():
                key = (domain, path, name)
                # Delete if the cookie has expired OR the predicate matches.
                if (
                    key in self._expirations and self._expirations[key] <= now
                ) or predicate(morsel):
                    to_del.append(key)

        for domain, path, name in to_del:
            self._host_only_cookies.discard((domain, name))
            key = (domain, path, name)
            if key in self._expirations:
                del self._expirations[(domain, path, name)]
            self._cookies[(domain, path)].pop(name, None)

        # Recompute the earliest pending expiration (MAX_TIME when none).
        self._next_expiration = (
            min(*self._expirations.values(), self.SUB_MAX_TIME) + 1
            if self._expirations
            else self.MAX_TIME
        )
146
+
147
+ def clear_domain(self, domain: str) -> None:
148
+ self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
149
+
150
+ def __iter__(self) -> "Iterator[Morsel[str]]":
151
+ self._do_expiration()
152
+ for val in self._cookies.values():
153
+ yield from val.values()
154
+
155
+ def __len__(self) -> int:
156
+ return sum(1 for i in self)
157
+
158
+ def _do_expiration(self) -> None:
159
+ self.clear(lambda x: False)
160
+
161
+ def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
162
+ self._next_expiration = min(self._next_expiration, when)
163
+ self._expirations[(domain, path, name)] = when
164
+
165
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies.

        Normalizes each incoming cookie's domain and path per RFC 6265
        section 5.3 before storing it, and records its expiration time.
        """
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                # Coerce a plain (name, value) pair into a Morsel.
                tmp = SimpleCookie()
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            # Max-Age takes precedence over Expires (RFC 6265 section 4.1.2.2).
            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
                    self._expire_cookie(max_age_expiration, domain, path, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, path, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[(domain, path)][name] = cookie

        self._do_expiration()
236
+
237
+ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
238
+ """Returns this jar's cookies filtered by their attributes."""
239
+ filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
240
+ SimpleCookie() if self._quote_cookie else BaseCookie()
241
+ )
242
+ if not self._cookies:
243
+ # Skip do_expiration() if there are no cookies.
244
+ return filtered
245
+ self._do_expiration()
246
+ if not self._cookies:
247
+ # Skip rest of function if no non-expired cookies.
248
+ return filtered
249
+ request_url = URL(request_url)
250
+ hostname = request_url.raw_host or ""
251
+
252
+ is_not_secure = request_url.scheme not in ("https", "wss")
253
+ if is_not_secure and self._treat_as_secure_origin:
254
+ request_origin = URL()
255
+ with contextlib.suppress(ValueError):
256
+ request_origin = request_url.origin()
257
+ is_not_secure = request_origin not in self._treat_as_secure_origin
258
+
259
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
260
+ for cookie in sorted(self, key=lambda c: len(c["path"])):
261
+ name = cookie.key
262
+ domain = cookie["domain"]
263
+
264
+ # Send shared cookies
265
+ if not domain:
266
+ filtered[name] = cookie.value
267
+ continue
268
+
269
+ if not self._unsafe and is_ip_address(hostname):
270
+ continue
271
+
272
+ if (domain, name) in self._host_only_cookies:
273
+ if domain != hostname:
274
+ continue
275
+ elif not self._is_domain_match(domain, hostname):
276
+ continue
277
+
278
+ if not self._is_path_match(request_url.path, cookie["path"]):
279
+ continue
280
+
281
+ if is_not_secure and cookie["secure"]:
282
+ continue
283
+
284
+ # It's critical we use the Morsel so the coded_value
285
+ # (based on cookie version) is preserved
286
+ mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
287
+ mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
288
+ filtered[name] = mrsl_val
289
+
290
+ return filtered
291
+
292
+ @staticmethod
293
+ def _is_domain_match(domain: str, hostname: str) -> bool:
294
+ """Implements domain matching adhering to RFC 6265."""
295
+ if hostname == domain:
296
+ return True
297
+
298
+ if not hostname.endswith(domain):
299
+ return False
300
+
301
+ non_matching = hostname[: -len(domain)]
302
+
303
+ if not non_matching.endswith("."):
304
+ return False
305
+
306
+ return not is_ip_address(hostname)
307
+
308
+ @staticmethod
309
+ def _is_path_match(req_path: str, cookie_path: str) -> bool:
310
+ """Implements path matching adhering to RFC 6265."""
311
+ if not req_path.startswith("/"):
312
+ req_path = "/"
313
+
314
+ if req_path == cookie_path:
315
+ return True
316
+
317
+ if not req_path.startswith(cookie_path):
318
+ return False
319
+
320
+ if cookie_path.endswith("/"):
321
+ return True
322
+
323
+ non_matching = req_path[len(cookie_path) :]
324
+
325
+ return non_matching.startswith("/")
326
+
327
    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[int]:
        """Implements date string parsing adhering to RFC 6265.

        Returns the parsed moment as an epoch timestamp, or None when
        *date_str* is empty or does not form a valid cookie date.
        """
        if not date_str:
            return None

        # Each flag records whether that component has been seen already;
        # only the first token matching each kind is used.
        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    # lastindex is used as the month number — presumably the
                    # month regex has one alternation group per month name
                    # (regex is defined on the class, not visible here).
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        # Two-digit years: 70-99 -> 1900s, 00-69 -> 2000s.
        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        # All four components (day, month, year, time) are mandatory.
        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
390
+
391
+
392
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # Always empty.
        return iter(())

    def __len__(self) -> int:
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """No-op: nothing is ever stored."""

    def clear_domain(self, domain: str) -> None:
        """No-op: nothing is ever stored."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """No-op: incoming cookies are discarded."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Always returns an empty cookie collection."""
        return SimpleCookie()
venv/lib/python3.10/site-packages/aiohttp/hdrs.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Headers constants."""
2
+
3
+ # After changing the file content call ./tools/gen.py
4
+ # to regenerate the headers parser
5
+ from typing import Final, Set
6
+
7
+ from multidict import istr
8
+
9
+ METH_ANY: Final[str] = "*"
10
+ METH_CONNECT: Final[str] = "CONNECT"
11
+ METH_HEAD: Final[str] = "HEAD"
12
+ METH_GET: Final[str] = "GET"
13
+ METH_DELETE: Final[str] = "DELETE"
14
+ METH_OPTIONS: Final[str] = "OPTIONS"
15
+ METH_PATCH: Final[str] = "PATCH"
16
+ METH_POST: Final[str] = "POST"
17
+ METH_PUT: Final[str] = "PUT"
18
+ METH_TRACE: Final[str] = "TRACE"
19
+
20
+ METH_ALL: Final[Set[str]] = {
21
+ METH_CONNECT,
22
+ METH_HEAD,
23
+ METH_GET,
24
+ METH_DELETE,
25
+ METH_OPTIONS,
26
+ METH_PATCH,
27
+ METH_POST,
28
+ METH_PUT,
29
+ METH_TRACE,
30
+ }
31
+
32
+ ACCEPT: Final[istr] = istr("Accept")
33
+ ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
34
+ ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
35
+ ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
36
+ ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
37
+ ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
38
+ ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
39
+ ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
40
+ ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
41
+ ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
42
+ ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
43
+ ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
44
+ ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
45
+ AGE: Final[istr] = istr("Age")
46
+ ALLOW: Final[istr] = istr("Allow")
47
+ AUTHORIZATION: Final[istr] = istr("Authorization")
48
+ CACHE_CONTROL: Final[istr] = istr("Cache-Control")
49
+ CONNECTION: Final[istr] = istr("Connection")
50
+ CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
51
+ CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
52
+ CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
53
+ CONTENT_LENGTH: Final[istr] = istr("Content-Length")
54
+ CONTENT_LOCATION: Final[istr] = istr("Content-Location")
55
+ CONTENT_MD5: Final[istr] = istr("Content-MD5")
56
+ CONTENT_RANGE: Final[istr] = istr("Content-Range")
57
+ CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
58
+ CONTENT_TYPE: Final[istr] = istr("Content-Type")
59
+ COOKIE: Final[istr] = istr("Cookie")
60
+ DATE: Final[istr] = istr("Date")
61
+ DESTINATION: Final[istr] = istr("Destination")
62
+ DIGEST: Final[istr] = istr("Digest")
63
+ ETAG: Final[istr] = istr("Etag")
64
+ EXPECT: Final[istr] = istr("Expect")
65
+ EXPIRES: Final[istr] = istr("Expires")
66
+ FORWARDED: Final[istr] = istr("Forwarded")
67
+ FROM: Final[istr] = istr("From")
68
+ HOST: Final[istr] = istr("Host")
69
+ IF_MATCH: Final[istr] = istr("If-Match")
70
+ IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
71
+ IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
72
+ IF_RANGE: Final[istr] = istr("If-Range")
73
+ IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
74
+ KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
75
+ LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
76
+ LAST_MODIFIED: Final[istr] = istr("Last-Modified")
77
+ LINK: Final[istr] = istr("Link")
78
+ LOCATION: Final[istr] = istr("Location")
79
+ MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
80
+ ORIGIN: Final[istr] = istr("Origin")
81
+ PRAGMA: Final[istr] = istr("Pragma")
82
+ PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
83
+ PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
84
+ RANGE: Final[istr] = istr("Range")
85
+ REFERER: Final[istr] = istr("Referer")
86
+ RETRY_AFTER: Final[istr] = istr("Retry-After")
87
+ SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
88
+ SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
89
+ SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
90
+ SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
91
+ SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
92
+ SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
93
+ SERVER: Final[istr] = istr("Server")
94
+ SET_COOKIE: Final[istr] = istr("Set-Cookie")
95
+ TE: Final[istr] = istr("TE")
96
+ TRAILER: Final[istr] = istr("Trailer")
97
+ TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
98
+ UPGRADE: Final[istr] = istr("Upgrade")
99
+ URI: Final[istr] = istr("URI")
100
+ USER_AGENT: Final[istr] = istr("User-Agent")
101
+ VARY: Final[istr] = istr("Vary")
102
+ VIA: Final[istr] = istr("Via")
103
+ WANT_DIGEST: Final[istr] = istr("Want-Digest")
104
+ WARNING: Final[istr] = istr("Warning")
105
+ WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
106
+ X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
107
+ X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
108
+ X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
venv/lib/python3.10/site-packages/aiohttp/helpers.py ADDED
@@ -0,0 +1,1029 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Various helper functions"""
2
+
3
+ import asyncio
4
+ import base64
5
+ import binascii
6
+ import contextlib
7
+ import datetime
8
+ import enum
9
+ import functools
10
+ import inspect
11
+ import netrc
12
+ import os
13
+ import platform
14
+ import re
15
+ import sys
16
+ import time
17
+ import warnings
18
+ import weakref
19
+ from collections import namedtuple
20
+ from contextlib import suppress
21
+ from email.parser import HeaderParser
22
+ from email.utils import parsedate
23
+ from math import ceil
24
+ from pathlib import Path
25
+ from types import TracebackType
26
+ from typing import (
27
+ Any,
28
+ Callable,
29
+ ContextManager,
30
+ Dict,
31
+ Generator,
32
+ Generic,
33
+ Iterable,
34
+ Iterator,
35
+ List,
36
+ Mapping,
37
+ Optional,
38
+ Pattern,
39
+ Protocol,
40
+ Tuple,
41
+ Type,
42
+ TypeVar,
43
+ Union,
44
+ get_args,
45
+ overload,
46
+ )
47
+ from urllib.parse import quote
48
+ from urllib.request import getproxies, proxy_bypass
49
+
50
+ import attr
51
+ from multidict import MultiDict, MultiDictProxy, MultiMapping
52
+ from yarl import URL
53
+
54
+ from . import hdrs
55
+ from .log import client_logger, internal_logger
56
+
57
+ if sys.version_info >= (3, 11):
58
+ import asyncio as async_timeout
59
+ else:
60
+ import async_timeout
61
+
62
+ __all__ = ("BasicAuth", "ChainMapProxy", "ETag")
63
+
64
# Platform detection flags.
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

# Python version gates used elsewhere in this module.
PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)


_T = TypeVar("_T")
_S = TypeVar("_S")

# Unique sentinel distinguishing "argument omitted" from an explicit None.
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel

# Set AIOHTTP_NO_EXTENSIONS (any non-empty value) to force the
# pure-Python implementations over the optional C extensions.
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

DEBUG = sys.flags.dev_mode or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


# HTTP token grammar building blocks: the ASCII charset, the control
# characters, and the separator characters.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# NOTE(review): ``^`` is symmetric difference, not subtraction — TAB
# (chr(9)) appears in both CTL and SEPARATORS, so it is removed by the
# first ``^`` and re-added by the second, ending up *inside* TOKEN.
# Confirm whether that is intended.
TOKEN = CHAR ^ CTL ^ SEPARATORS
110
+
111
+
112
class noop:
    """Awaitable that suspends once (a bare yield) and completes with no value."""

    def __await__(self) -> Generator[None, None, None]:
        # Single bare ``yield``: give the event loop one step, then finish.
        yield
115
+
116
+
117
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        if login is None:
            raise ValueError("None is not allowed as login value")
        if password is None:
            raise ValueError("None is not allowed as password value")
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        parts = auth_header.split(" ", 1)
        if len(parts) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, encoded_credentials = parts

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        username, sep, password = decoded.partition(":")
        if not sep:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        raw = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(raw).decode(self.encoding)
176
+
177
+
178
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL plus its BasicAuth (if any)."""
    auth = BasicAuth.from_url(url)
    return (url, None) if auth is None else (url.with_user(None), auth)
184
+
185
+
186
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    env_path = os.environ.get("NETRC")

    if env_path is not None:
        candidate = Path(env_path)
    else:
        try:
            home = Path.home()
        except RuntimeError as exc:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                exc,
            )
            return None
        candidate = home / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(candidate))
    except netrc.NetrcParseError as exc:
        client_logger.warning("Could not parse .netrc file: %s", exc)
    except OSError as exc:
        file_exists = False
        with contextlib.suppress(OSError):
            file_exists = candidate.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.);
        # only warn when the env-var asked for it or the default file
        # actually appears to exist.
        if env_path or file_exists:
            client_logger.warning("Could not read .netrc file: %s", exc)

    return None
227
+
228
+
229
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Immutable pair of a proxy URL and its optional credentials."""

    # Proxy endpoint URL (credentials stripped by proxies_from_env).
    proxy: URL
    # Credentials taken from the URL itself or the user's netrc file, if any.
    proxy_auth: Optional[BasicAuth]
233
+
234
+
235
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
                         entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")

    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    username = login if (login or account is None) else account

    # TODO(PY311): Remove this, as password will be empty string
    # if not specified
    return BasicAuth(username, password if password is not None else "")
262
+
263
+
264
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy settings (plus optional netrc credentials) from the env."""
    proxy_urls = {
        scheme: URL(raw)
        for scheme, raw in getproxies().items()
        if scheme in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    result: Dict[str, ProxyInfo] = {}
    for scheme, raw_url in proxy_urls.items():
        proxy, auth = strip_auth_from_url(raw_url)
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        # Fall back to netrc credentials when the URL carried none.
        if netrc_obj and auth is None and proxy.host is not None:
            try:
                auth = basicauth_from_netrc(netrc_obj, proxy.host)
            except LookupError:
                auth = None
        result[scheme] = ProxyInfo(proxy, auth)
    return result
288
+
289
+
290
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Return the currently running task; thin wrapper over asyncio.current_task."""
    return asyncio.current_task(loop=loop)
294
+
295
+
296
def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop*, or the thread's event loop, warning when it isn't running."""
    result = loop if loop is not None else asyncio.get_event_loop()
    if not result.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if result.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return result
312
+
313
+
314
def isasyncgenfunction(obj: Any) -> bool:
    """Return True if *obj* is an async generator function.

    ``inspect.isasyncgenfunction`` has existed since Python 3.6, so the
    historical ``getattr(inspect, ...)`` fallback (which returned False on
    interpreters lacking the helper) is dead code on every supported
    Python and has been removed.
    """
    return inspect.isasyncgenfunction(obj)
320
+
321
+
322
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    # Respect the platform's proxy-bypass rules for this host.
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    env_proxies = proxies_from_env()
    try:
        proxy_info = env_proxies[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return proxy_info.proxy, proxy_info.proxy_auth
334
+
335
+
336
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Decomposed MIME type, as produced by :func:`parse_mimetype`."""

    # Major type, e.g. "text" in "text/html+xml; charset=utf-8".
    type: str
    # Subtype, e.g. "html".
    subtype: str
    # Optional "+suffix" part, e.g. "xml".
    suffix: str
    # Remaining ";key=value" parameters, e.g. {"charset": "utf-8"}.
    parameters: "MultiDictProxy[str]"
342
+
343
+
344
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    fulltype, *raw_params = mimetype.split(";")

    # Collect ";key=value" parameters, stripping quotes and whitespace.
    params: MultiDict[str] = MultiDict()
    for raw in raw_params:
        if not raw:
            continue
        key, _, value = raw.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = fulltype.strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
382
+
383
+
384
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort file name for *obj* (e.g. an open file's ``name`` attribute)."""
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    # Pseudo-names such as "<stdin>" are not real paths.
    if name[0] == "<" or name[-1] == ">":
        return default
    return Path(name).name
389
+
390
+
391
# Characters that must be backslash-escaped inside an RFC 5322 quoted-string
# (everything outside the "qtext" ranges).
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# The full set of characters permitted in quoted-string content:
# printable ASCII plus TAB.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # Bug fix: the original used ``QCONTENT > set(content)`` (a *strict*
    # superset test), which spuriously raised for content that used every
    # allowed character; subset containment is the intended validity check.
    if not set(content) <= QCONTENT:
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
406
+
407
+
408
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # NOTE(review): ``TOKEN > set(...)`` is a *strict* superset test, so a
    # value consisting of every token character would be rejected — confirm
    # whether subset containment was intended.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must themselves be valid tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # Filenames are always percent-encoded and double-quoted.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer the plain 7-bit quoted-string form.
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-ASCII value: fall back to the RFC 2231
                        # extended form charset''percent-encoded, using
                        # the starred parameter name (key*).
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslashes and double quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
459
+
460
+
461
class _TSelf(Protocol, Generic[_T]):
    """Structural type for instances exposing a ``_cache`` dict (see reify)."""

    _cache: Dict[str, _T]
463
+
464
+
465
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        cache = inst._cache
        if self.name not in cache:
            # First access: compute once and memoise on the instance.
            cache[self.name] = self.wrapped(inst)
        return cache[self.name]

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")
495
+
496
+
497
# Keep the pure-Python implementation importable under a stable name.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    # Prefer the C-accelerated version unless extensions are disabled.
    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    # The compiled helper is optional; silently keep the Python version.
    pass
506
+
507
# Dotted-quad IPv4 literal; each octet constrained to 0-255.
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
# IPv6 literal, covering full, "::"-compressed and IPv4-tail forms.
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
# Pre-compiled str and bytes variants; IPv6 matching is case-insensitive.
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
525
+
526
+
527
+ def _is_ip_address(
528
+ regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
529
+ ) -> bool:
530
+ if host is None:
531
+ return False
532
+ if isinstance(host, str):
533
+ return bool(regex.match(host))
534
+ elif isinstance(host, (bytes, bytearray, memoryview)):
535
+ return bool(regexb.match(host))
536
+ else:
537
+ raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
538
+
539
+
540
# Family-specific checkers bound to their pre-compiled patterns.
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """True when *host* parses as either an IPv4 or an IPv6 literal."""
    if is_ipv4_address(host):
        return True
    return is_ipv6_address(host)
546
+
547
+
548
# One-second-resolution cache of the formatted date string.
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted as an RFC 822 HTTP-date."""
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        weekdays = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        months = (
            "",  # dummy so month numbers stay 1-based
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )
        year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            weekdays[wd],
            day,
            months[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime
590
+
591
+
592
+ def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
593
+ ref, name = info
594
+ ob = ref()
595
+ if ob is not None:
596
+ with suppress(Exception):
597
+ getattr(ob, name)()
598
+
599
+
600
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` via a weak reference after *timeout* seconds."""
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    if timeout >= timeout_ceil_threshold:
        # Round long deadlines up to whole seconds to coalesce wakeups.
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
614
+
615
+
616
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds; None for non-positive timeouts."""
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # NOTE(review): strict ">" here versus ">=" in weakref_handle —
    # presumably intentional, but worth confirming.
    if timeout > timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, cb)
628
+
629
+
630
class TimeoutHandle:
    """Timeout handle: fires a set of registered callbacks when armed timer expires."""

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # Each entry is (callback, positional args, keyword args).
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add *callback* to be invoked when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without firing them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        """Arm the timer; returns the handle, or None when no timeout is set."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        when = self._loop.time() + timeout
        # Long deadlines get rounded up to whole seconds for timer coalescing.
        if timeout >= self._ceil_threshold:
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context tied to this handle (a no-op without a timeout)."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        timer = TimerContext(self._loop)
        self.register(timer.timeout)
        return timer

    def __call__(self) -> None:
        """Fire every registered callback once, swallowing their exceptions."""
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)
        self._callbacks.clear()
678
+
679
+
680
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    """Common interface for the timer contexts handed out by TimeoutHandle."""

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
683
+
684
+
685
class TimerNoop(BaseTimerContext):
    """Timer context that never times out and never cancels anything."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to clean up; exceptions propagate unchanged.
        return
696
+
697
+
698
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside the context; cancelled when the timer fires.
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def assert_timeout(self) -> None:
        """Raise TimeoutError if the timer has already fired."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)
        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used inside a task"
            )

        # Entering after the timer already fired counts as a timeout.
        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        # Translate our own cancellation into a timeout error.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        """Fire the timer: cancel every task inside the context (idempotent)."""
        if self._cancelled:
            return
        for task in set(self._tasks):
            task.cancel()
        self._cancelled = True
744
+
745
+
746
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an async_timeout context for *delay* seconds.

    Deadlines longer than *ceil_threshold* are rounded up to a whole
    second so the event loop can coalesce timers; no/non-positive delay
    yields an unlimited timeout.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    deadline = loop.time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
758
+
759
+
760
class HeadersMixin:
    """Adds lazy Content-Type / charset / Content-Length accessors over ``_headers``."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _headers: MultiMapping[str]

    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        # Remember the raw header so repeated property reads skip re-parsing.
        self._stored_content_type = raw
        if raw is None:
            # Default value according to RFC 2616 when the header is absent.
            self._content_type = "application/octet-stream"
            self._content_dict = {}
            return
        msg = HeaderParser().parsestr("Content-Type: " + raw)
        self._content_type = msg.get_content_type()
        params = msg.get_params(())
        # The first element repeats the content type itself; keep only
        # the parameter key/value pairs (charset etc.).
        self._content_dict = dict(params[1:])

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        raw_length = self._headers.get(hdrs.CONTENT_LENGTH)
        return None if raw_length is None else int(raw_length)
806
+
807
+
808
def set_result(fut: "asyncio.Future[_T]", result: "_T") -> None:
    """Set *result* on *fut* unless the future has already completed."""
    if fut.done():
        return
    fut.set_result(result)
811
+
812
+
813
# Sentinel marking "no explicit cause provided" for set_exception().
_EXC_SENTINEL = BaseException()


class ErrorableProtocol(Protocol):
    """Anything exposing ``set_exception`` (futures, aiohttp protocols)."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None:
        ...  # pragma: no cover


def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
        Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when one was actually supplied and it is not
    # the exception itself (avoids a self-referential __cause__).
    if exc_cause is not _EXC_SENTINEL and exc is not exc_cause:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
846
+
847
+
848
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with the defining module's name to help deduplicate key names.
        # Fallback in case no "<module>" frame is found (e.g. when
        # instantiated from exec()/an embedded interpreter); previously
        # ``module`` could be unbound here, raising NameError.
        module: str = "<unknown>"
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
893
+
894
+
895
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over an ordered sequence of mappings; first match wins."""

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            f"Inheritance class {cls.__name__} from ChainMapProxy is forbidden"
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T:
        ...

    @overload
    def __getitem__(self, key: str) -> Any:
        ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        for layer in self._maps:
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]:
        ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
        ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any:
        ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # set.union reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        seen: Dict[Union[str, AppKey[Any]], Any] = {}
        # Later maps are shadowed by earlier ones, so merge in reverse;
        # dict.update reuses stored hash values if possible.
        for layer in reversed(self._maps):
            seen.update(layer)
        return iter(seen)

    def __contains__(self, key: object) -> bool:
        return any(key in layer for layer in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        return "ChainMapProxy({})".format(", ".join(map(repr, self._maps)))
961
+
962
+
963
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc: printable ASCII except '"' (0x22), plus obs-text (0x80-0xff).
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A single (optionally weak, "W/"-prefixed) quoted entity-tag.
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Scans a comma-separated etag list; the trailing (.) alternative
# catches any stray character, signalling malformed input.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard entity-tag matching any representation.
ETAG_ANY = "*"
971
+
972
+
973
# Immutable entity-tag record: the opaque tag value plus its weakness
# flag (weak validators carry the "W/" prefix per RFC 7232).
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    value: str
    is_weak: bool = False
977
+
978
+
979
def validate_etag_value(value: str) -> None:
    """Raise ValueError when *value* is neither '*' nor a valid etagc string."""
    if value == ETAG_ANY:
        return
    if not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
984
+
985
+
986
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a UTC-aware datetime or None on failure."""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    # Out-of-range components (e.g. month 13) raise ValueError;
    # treat those as unparseable rather than crashing.
    with suppress(ValueError):
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
994
+
995
+
996
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if status_code_must_be_empty_body(code):
        return True
    if method_must_be_empty_body(method):
        return True
    # A successful (2xx) response to CONNECT switches to tunnel mode
    # and therefore carries no message body.
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
1003
+
1004
+
1005
def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    return hdrs.METH_HEAD == method.upper()
1010
+
1011
+
1012
def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # 1xx informational, 204 No Content and 304 Not Modified never carry a body.
    return 100 <= code < 200 or code in {204, 304}
1016
+
1017
+
1018
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if 100 <= code < 200 or code in {204, 304}:
        return True
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT