Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- ckpts/universal/global_step40/zero/10.input_layernorm.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/10.input_layernorm.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step40/zero/10.input_layernorm.weight/fp32.pt +3 -0
- ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/fp32.pt +3 -0
- ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/8.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt +3 -0
- venv/lib/python3.10/site-packages/aiohttp/_helpers.pyi +6 -0
- venv/lib/python3.10/site-packages/aiohttp/compression_utils.py +157 -0
- venv/lib/python3.10/site-packages/aiohttp/connector.py +1511 -0
- venv/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
- venv/lib/python3.10/site-packages/aiohttp/http_exceptions.py +106 -0
- venv/lib/python3.10/site-packages/aiohttp/payload.py +463 -0
- venv/lib/python3.10/site-packages/aiohttp/pytest_plugin.py +381 -0
- venv/lib/python3.10/site-packages/aiohttp/tcp_helpers.py +37 -0
- venv/lib/python3.10/site-packages/aiohttp/web_protocol.py +698 -0
- venv/lib/python3.10/site-packages/aiohttp/web_server.py +77 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__init__.py +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_config.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_dataclasses.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_decorators_v1.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_fields.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_generics.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_git.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_repr.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_signature.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_validators.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/_config.py +334 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/_core_metadata.py +92 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/_core_utils.py +568 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/_dataclasses.py +230 -0
- venv/lib/python3.10/site-packages/pydantic/_internal/_decorators.py +791 -0
ckpts/universal/global_step40/zero/10.input_layernorm.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd020cdba0aff64a2ab3594d2f02763129a007684b091faed3a088b8bb3cc512
+size 9372
ckpts/universal/global_step40/zero/10.input_layernorm.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0bc23d848c48fdc034293d76e4208207949b26de202fb8479afa8aab3f24428
+size 9387
ckpts/universal/global_step40/zero/10.input_layernorm.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c4a9c33b9930618976f39e7edf08775a631ad6a1222ff04e5ccfb880f3c76d7c
+size 9293
ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d61a43e101f8e63358ac76813ed5213ac8e6399e93cc664c05612bd60d4f1f8
+size 33555612
ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0cff1249cbc12e3f523940b2b9419fecf310ebe3f6fa37b0caefa1f6b77280f7
+size 33555627
ckpts/universal/global_step40/zero/16.mlp.dense_h_to_4h_swiglu.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a040fd8170ddbe4c33dba439cf75bbf764ed6a76f7531062dc0ab77b7485656
+size 33555533
ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a718064fe35999845107350bfdfe50385bb2709221a33378a657315479dfad9b
+size 33555612
ckpts/universal/global_step40/zero/8.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53611bb07b98fc18be95fe0a04287e88363a16dc0552e2edb01f61803c8faeba
+size 33555612
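
Each checkpoint file above is stored as a Git LFS pointer rather than as raw tensor data: a three-line stub recording the spec version, the sha256 object id, and the blob size in bytes. A minimal parsing sketch follows; the parse_lfs_pointer helper is illustrative and not part of this repo.

# Minimal sketch: parse one of the Git LFS pointer stubs added above.
# parse_lfs_pointer is a hypothetical helper, not part of this repo.
from typing import Dict


def parse_lfs_pointer(text: str) -> Dict[str, str]:
    """Split the 'key value' lines (version / oid / size) into a dict."""
    fields: Dict[str, str] = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:cd020cdba0aff64a2ab3594d2f02763129a007684b091faed3a088b8bb3cc512\n"
    "size 9372\n"
)
info = parse_lfs_pointer(pointer)
assert info["size"] == "9372"  # size of the real blob, not of the pointer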
venv/lib/python3.10/site-packages/aiohttp/_helpers.pyi
ADDED
@@ -0,0 +1,6 @@
+from typing import Any
+
+class reify:
+    def __init__(self, wrapped: Any) -> None: ...
+    def __get__(self, inst: Any, owner: Any) -> Any: ...
+    def __set__(self, inst: Any, value: Any) -> None: ...
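
The stub above only declares the descriptor protocol for reify; the implementation lives in aiohttp.helpers (with a C-accelerated twin in _helpers). A rough usage sketch follows, assuming the cached-property behavior of aiohttp's reify and its convention of storing results in the instance's _cache dict; the Request class here is invented for illustration.

# Rough usage sketch for the descriptor declared in _helpers.pyi.
# Assumption: aiohttp.helpers.reify computes the wrapped method once
# per instance and caches the result in the instance's `_cache` dict.
from aiohttp.helpers import reify


class Request:
    def __init__(self, raw_path: str) -> None:
        self._cache: dict = {}  # reify stores computed values here
        self._raw_path = raw_path

    @reify
    def path(self) -> str:
        # Computed on first access, then served from self._cache.
        return self._raw_path.split("?", 1)[0]


req = Request("/items?page=2")
assert req.path == "/items"  # first access computes and caches
assert req.path == "/items"  # later accesses reuse the cached value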
venv/lib/python3.10/site-packages/aiohttp/compression_utils.py
ADDED
@@ -0,0 +1,157 @@
+import asyncio
+import zlib
+from concurrent.futures import Executor
+from typing import Optional, cast
+
+try:
+    try:
+        import brotlicffi as brotli
+    except ImportError:
+        import brotli
+
+    HAS_BROTLI = True
+except ImportError:  # pragma: no cover
+    HAS_BROTLI = False
+
+MAX_SYNC_CHUNK_SIZE = 1024
+
+
+def encoding_to_mode(
+    encoding: Optional[str] = None,
+    suppress_deflate_header: bool = False,
+) -> int:
+    if encoding == "gzip":
+        return 16 + zlib.MAX_WBITS
+
+    return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
+
+
+class ZlibBaseHandler:
+    def __init__(
+        self,
+        mode: int,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        self._mode = mode
+        self._executor = executor
+        self._max_sync_chunk_size = max_sync_chunk_size
+
+
+class ZLibCompressor(ZlibBaseHandler):
+    def __init__(
+        self,
+        encoding: Optional[str] = None,
+        suppress_deflate_header: bool = False,
+        level: Optional[int] = None,
+        wbits: Optional[int] = None,
+        strategy: int = zlib.Z_DEFAULT_STRATEGY,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        super().__init__(
+            mode=encoding_to_mode(encoding, suppress_deflate_header)
+            if wbits is None
+            else wbits,
+            executor=executor,
+            max_sync_chunk_size=max_sync_chunk_size,
+        )
+        if level is None:
+            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
+        else:
+            self._compressor = zlib.compressobj(
+                wbits=self._mode, strategy=strategy, level=level
+            )
+        self._compress_lock = asyncio.Lock()
+
+    def compress_sync(self, data: bytes) -> bytes:
+        return self._compressor.compress(data)
+
+    async def compress(self, data: bytes) -> bytes:
+        async with self._compress_lock:
+            # To ensure the stream is consistent in the event
+            # there are multiple writers, we need to lock
+            # the compressor so that only one writer can
+            # compress at a time.
+            if (
+                self._max_sync_chunk_size is not None
+                and len(data) > self._max_sync_chunk_size
+            ):
+                return await asyncio.get_event_loop().run_in_executor(
+                    self._executor, self.compress_sync, data
+                )
+            return self.compress_sync(data)
+
+    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
+        return self._compressor.flush(mode)
+
+
+class ZLibDecompressor(ZlibBaseHandler):
+    def __init__(
+        self,
+        encoding: Optional[str] = None,
+        suppress_deflate_header: bool = False,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        super().__init__(
+            mode=encoding_to_mode(encoding, suppress_deflate_header),
+            executor=executor,
+            max_sync_chunk_size=max_sync_chunk_size,
+        )
+        self._decompressor = zlib.decompressobj(wbits=self._mode)
+
+    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
+        return self._decompressor.decompress(data, max_length)
+
+    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
+        if (
+            self._max_sync_chunk_size is not None
+            and len(data) > self._max_sync_chunk_size
+        ):
+            return await asyncio.get_event_loop().run_in_executor(
+                self._executor, self.decompress_sync, data, max_length
+            )
+        return self.decompress_sync(data, max_length)
+
+    def flush(self, length: int = 0) -> bytes:
+        return (
+            self._decompressor.flush(length)
+            if length > 0
+            else self._decompressor.flush()
+        )
+
+    @property
+    def eof(self) -> bool:
+        return self._decompressor.eof
+
+    @property
+    def unconsumed_tail(self) -> bytes:
+        return self._decompressor.unconsumed_tail
+
+    @property
+    def unused_data(self) -> bytes:
+        return self._decompressor.unused_data
+
+
+class BrotliDecompressor:
+    # Supports both 'brotlipy' and 'Brotli' packages
+    # since they share an import name. The top branches
+    # are for 'brotlipy' and bottom branches for 'Brotli'
+    def __init__(self) -> None:
+        if not HAS_BROTLI:
+            raise RuntimeError(
+                "The brotli decompression is not available. "
+                "Please install `Brotli` module"
+            )
+        self._obj = brotli.Decompressor()
+
+    def decompress_sync(self, data: bytes) -> bytes:
+        if hasattr(self._obj, "decompress"):
+            return cast(bytes, self._obj.decompress(data))
+        return cast(bytes, self._obj.process(data))
+
+    def flush(self) -> bytes:
+        if hasattr(self._obj, "flush"):
+            return cast(bytes, self._obj.flush())
+        return b""
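
The module above wraps zlib's streaming objects so that chunks larger than MAX_SYNC_CHUNK_SIZE are compressed off the event loop. A round-trip sketch follows, using the vendored aiohttp.compression_utils API exactly as defined above; the payload and its size are arbitrary test values.

# Round-trip sketch for the ZLibCompressor / ZLibDecompressor pair above.
import asyncio

from aiohttp.compression_utils import ZLibCompressor, ZLibDecompressor


async def main() -> None:
    # 6000 bytes > MAX_SYNC_CHUNK_SIZE (1024), so compress()/decompress()
    # offload the zlib work to the default executor.
    payload = b"hello " * 1000

    compressor = ZLibCompressor(encoding="gzip")
    compressed = await compressor.compress(payload) + compressor.flush()

    decompressor = ZLibDecompressor(encoding="gzip")
    restored = await decompressor.decompress(compressed)
    assert restored == payload


asyncio.run(main())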
venv/lib/python3.10/site-packages/aiohttp/connector.py
ADDED
@@ -0,0 +1,1511 @@
+import asyncio
+import functools
+import random
+import sys
+import traceback
+import warnings
+from collections import defaultdict, deque
+from contextlib import suppress
+from http import HTTPStatus
+from http.cookies import SimpleCookie
+from itertools import cycle, islice
+from time import monotonic
+from types import TracebackType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    DefaultDict,
+    Dict,
+    Iterator,
+    List,
+    Literal,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+import attr
+
+from . import hdrs, helpers
+from .abc import AbstractResolver
+from .client_exceptions import (
+    ClientConnectionError,
+    ClientConnectorCertificateError,
+    ClientConnectorError,
+    ClientConnectorSSLError,
+    ClientHttpProxyError,
+    ClientProxyConnectionError,
+    ServerFingerprintMismatch,
+    UnixClientConnectorError,
+    cert_errors,
+    ssl_errors,
+)
+from .client_proto import ResponseHandler
+from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
+from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel
+from .locks import EventResultOrError
+from .resolver import DefaultResolver
+
+try:
+    import ssl
+
+    SSLContext = ssl.SSLContext
+except ImportError:  # pragma: no cover
+    ssl = None  # type: ignore[assignment]
+    SSLContext = object  # type: ignore[misc,assignment]
+
+
+__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
+
+
+if TYPE_CHECKING:
+    from .client import ClientTimeout
+    from .client_reqrep import ConnectionKey
+    from .tracing import Trace
+
+
+class _DeprecationWaiter:
+    __slots__ = ("_awaitable", "_awaited")
+
+    def __init__(self, awaitable: Awaitable[Any]) -> None:
+        self._awaitable = awaitable
+        self._awaited = False
+
+    def __await__(self) -> Any:
+        self._awaited = True
+        return self._awaitable.__await__()
+
+    def __del__(self) -> None:
+        if not self._awaited:
+            warnings.warn(
+                "Connector.close() is a coroutine, "
+                "please use await connector.close()",
+                DeprecationWarning,
+            )
+
+
+class Connection:
+
+    _source_traceback = None
+    _transport = None
+
+    def __init__(
+        self,
+        connector: "BaseConnector",
+        key: "ConnectionKey",
+        protocol: ResponseHandler,
+        loop: asyncio.AbstractEventLoop,
+    ) -> None:
+        self._key = key
+        self._connector = connector
+        self._loop = loop
+        self._protocol: Optional[ResponseHandler] = protocol
+        self._callbacks: List[Callable[[], None]] = []
+
+        if loop.get_debug():
+            self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+    def __repr__(self) -> str:
+        return f"Connection<{self._key}>"
+
+    def __del__(self, _warnings: Any = warnings) -> None:
+        if self._protocol is not None:
+            kwargs = {"source": self}
+            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
+            if self._loop.is_closed():
+                return
+
+            self._connector._release(self._key, self._protocol, should_close=True)
+
+            context = {"client_connection": self, "message": "Unclosed connection"}
+            if self._source_traceback is not None:
+                context["source_traceback"] = self._source_traceback
+            self._loop.call_exception_handler(context)
+
+    def __bool__(self) -> Literal[True]:
+        """Force subclasses to not be falsy, to make checks simpler."""
+        return True
+
+    @property
+    def loop(self) -> asyncio.AbstractEventLoop:
+        warnings.warn(
+            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
+        )
+        return self._loop
+
+    @property
+    def transport(self) -> Optional[asyncio.Transport]:
+        if self._protocol is None:
+            return None
+        return self._protocol.transport
+
+    @property
+    def protocol(self) -> Optional[ResponseHandler]:
+        return self._protocol
+
+    def add_callback(self, callback: Callable[[], None]) -> None:
+        if callback is not None:
+            self._callbacks.append(callback)
+
+    def _notify_release(self) -> None:
+        callbacks, self._callbacks = self._callbacks[:], []
+
+        for cb in callbacks:
+            with suppress(Exception):
+                cb()
+
+    def close(self) -> None:
+        self._notify_release()
+
+        if self._protocol is not None:
+            self._connector._release(self._key, self._protocol, should_close=True)
+            self._protocol = None
+
+    def release(self) -> None:
+        self._notify_release()
+
+        if self._protocol is not None:
+            self._connector._release(
+                self._key, self._protocol, should_close=self._protocol.should_close
+            )
+            self._protocol = None
+
+    @property
+    def closed(self) -> bool:
+        return self._protocol is None or not self._protocol.is_connected()
+
+
+class _TransportPlaceholder:
+    """placeholder for BaseConnector.connect function"""
+
+    def close(self) -> None:
+        pass
+
+
+class BaseConnector:
+    """Base connector class.
+
+    keepalive_timeout - (optional) Keep-alive timeout.
+    force_close - Set to True to force close and do reconnect
+        after each request (and between redirects).
+    limit - The total number of simultaneous connections.
+    limit_per_host - Number of simultaneous connections to one host.
+    enable_cleanup_closed - Enables clean-up closed ssl transports.
+                            Disabled by default.
+    timeout_ceil_threshold - Trigger ceiling of timeout values when
+                             it's above timeout_ceil_threshold.
+    loop - Optional event loop.
+    """
+
+    _closed = True  # prevent AttributeError in __del__ if ctor was failed
+    _source_traceback = None
+
+    # abort transport after 2 seconds (cleanup broken connections)
+    _cleanup_closed_period = 2.0
+
+    def __init__(
+        self,
+        *,
+        keepalive_timeout: Union[object, None, float] = sentinel,
+        force_close: bool = False,
+        limit: int = 100,
+        limit_per_host: int = 0,
+        enable_cleanup_closed: bool = False,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        timeout_ceil_threshold: float = 5,
+    ) -> None:
+
+        if force_close:
+            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
+                raise ValueError(
+                    "keepalive_timeout cannot " "be set if force_close is True"
+                )
+        else:
+            if keepalive_timeout is sentinel:
+                keepalive_timeout = 15.0
+
+        loop = get_running_loop(loop)
+        self._timeout_ceil_threshold = timeout_ceil_threshold
+
+        self._closed = False
+        if loop.get_debug():
+            self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+        self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
+        self._limit = limit
+        self._limit_per_host = limit_per_host
+        self._acquired: Set[ResponseHandler] = set()
+        self._acquired_per_host: DefaultDict[
+            ConnectionKey, Set[ResponseHandler]
+        ] = defaultdict(set)
+        self._keepalive_timeout = cast(float, keepalive_timeout)
+        self._force_close = force_close
+
+        # {host_key: FIFO list of waiters}
+        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]
+
+        self._loop = loop
+        self._factory = functools.partial(ResponseHandler, loop=loop)
+
+        self.cookies = SimpleCookie()
+
+        # start keep-alive connection cleanup task
+        self._cleanup_handle: Optional[asyncio.TimerHandle] = None
+
+        # start cleanup closed transports task
+        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
+        self._cleanup_closed_disabled = not enable_cleanup_closed
+        self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
+        self._cleanup_closed()
+
+    def __del__(self, _warnings: Any = warnings) -> None:
+        if self._closed:
+            return
+        if not self._conns:
+            return
+
+        conns = [repr(c) for c in self._conns.values()]
+
+        self._close()
+
+        kwargs = {"source": self}
+        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
+        context = {
+            "connector": self,
+            "connections": conns,
+            "message": "Unclosed connector",
+        }
+        if self._source_traceback is not None:
+            context["source_traceback"] = self._source_traceback
+        self._loop.call_exception_handler(context)
+
+    def __enter__(self) -> "BaseConnector":
+        warnings.warn(
+            '"with Connector():" is deprecated, '
+            'use "async with Connector():" instead',
+            DeprecationWarning,
+        )
+        return self
+
+    def __exit__(self, *exc: Any) -> None:
+        self._close()
+
+    async def __aenter__(self) -> "BaseConnector":
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]] = None,
+        exc_value: Optional[BaseException] = None,
+        exc_traceback: Optional[TracebackType] = None,
+    ) -> None:
+        await self.close()
+
+    @property
+    def force_close(self) -> bool:
+        """Ultimately close connection on releasing if True."""
+        return self._force_close
+
+    @property
+    def limit(self) -> int:
+        """The total number for simultaneous connections.
+
+        If limit is 0 the connector has no limit.
+        The default limit size is 100.
+        """
+        return self._limit
+
+    @property
+    def limit_per_host(self) -> int:
+        """The limit for simultaneous connections to the same endpoint.
+
+        Endpoints are the same if they are have equal
+        (host, port, is_ssl) triple.
+        """
+        return self._limit_per_host
+
+    def _cleanup(self) -> None:
+        """Cleanup unused transports."""
+        if self._cleanup_handle:
+            self._cleanup_handle.cancel()
+            # _cleanup_handle should be unset, otherwise _release() will not
+            # recreate it ever!
+            self._cleanup_handle = None
+
+        now = self._loop.time()
+        timeout = self._keepalive_timeout
+
+        if self._conns:
+            connections = {}
+            deadline = now - timeout
+            for key, conns in self._conns.items():
+                alive = []
+                for proto, use_time in conns:
+                    if proto.is_connected():
+                        if use_time - deadline < 0:
+                            transport = proto.transport
+                            proto.close()
+                            if key.is_ssl and not self._cleanup_closed_disabled:
+                                self._cleanup_closed_transports.append(transport)
+                        else:
+                            alive.append((proto, use_time))
+                    else:
+                        transport = proto.transport
+                        proto.close()
+                        if key.is_ssl and not self._cleanup_closed_disabled:
+                            self._cleanup_closed_transports.append(transport)
+
+                if alive:
+                    connections[key] = alive
+
+            self._conns = connections
+
+        if self._conns:
+            self._cleanup_handle = helpers.weakref_handle(
+                self,
+                "_cleanup",
+                timeout,
+                self._loop,
+                timeout_ceil_threshold=self._timeout_ceil_threshold,
+            )
+
+    def _drop_acquired_per_host(
+        self, key: "ConnectionKey", val: ResponseHandler
+    ) -> None:
+        acquired_per_host = self._acquired_per_host
+        if key not in acquired_per_host:
+            return
+        conns = acquired_per_host[key]
+        conns.remove(val)
+        if not conns:
+            del self._acquired_per_host[key]
+
+    def _cleanup_closed(self) -> None:
+        """Double confirmation for transport close.
+
+        Some broken ssl servers may leave socket open without proper close.
+        """
+        if self._cleanup_closed_handle:
+            self._cleanup_closed_handle.cancel()
+
+        for transport in self._cleanup_closed_transports:
+            if transport is not None:
+                transport.abort()
+
+        self._cleanup_closed_transports = []
+
+        if not self._cleanup_closed_disabled:
+            self._cleanup_closed_handle = helpers.weakref_handle(
+                self,
+                "_cleanup_closed",
+                self._cleanup_closed_period,
+                self._loop,
+                timeout_ceil_threshold=self._timeout_ceil_threshold,
+            )
+
+    def close(self) -> Awaitable[None]:
+        """Close all opened transports."""
+        self._close()
+        return _DeprecationWaiter(noop())
+
+    def _close(self) -> None:
+        if self._closed:
+            return
+
+        self._closed = True
+
+        try:
+            if self._loop.is_closed():
+                return
+
+            # cancel cleanup task
+            if self._cleanup_handle:
+                self._cleanup_handle.cancel()
+
+            # cancel cleanup close task
+            if self._cleanup_closed_handle:
+                self._cleanup_closed_handle.cancel()
+
+            for data in self._conns.values():
+                for proto, t0 in data:
+                    proto.close()
+
+            for proto in self._acquired:
+                proto.close()
+
+            for transport in self._cleanup_closed_transports:
+                if transport is not None:
+                    transport.abort()
+
+        finally:
+            self._conns.clear()
+            self._acquired.clear()
+            self._waiters.clear()
+            self._cleanup_handle = None
+            self._cleanup_closed_transports.clear()
+            self._cleanup_closed_handle = None
+
+    @property
+    def closed(self) -> bool:
+        """Is connector closed.
+
+        A readonly property.
+        """
+        return self._closed
+
+    def _available_connections(self, key: "ConnectionKey") -> int:
+        """
+        Return number of available connections.
+
+        The limit, limit_per_host and the connection key are taken into account.
+
+        If it returns less than 1 means that there are no connections
+        available.
+        """
+        if self._limit:
+            # total calc available connections
+            available = self._limit - len(self._acquired)
+
+            # check limit per host
+            if (
+                self._limit_per_host
+                and available > 0
+                and key in self._acquired_per_host
+            ):
+                acquired = self._acquired_per_host.get(key)
+                assert acquired is not None
+                available = self._limit_per_host - len(acquired)
+
+        elif self._limit_per_host and key in self._acquired_per_host:
+            # check limit per host
+            acquired = self._acquired_per_host.get(key)
+            assert acquired is not None
+            available = self._limit_per_host - len(acquired)
+        else:
+            available = 1
+
+        return available
+
+    async def connect(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> Connection:
+        """Get from pool or create new connection."""
+        key = req.connection_key
+        available = self._available_connections(key)
+
+        # Wait if there are no available connections or if there are/were
+        # waiters (i.e. don't steal connection from a waiter about to wake up)
+        if available <= 0 or key in self._waiters:
+            fut = self._loop.create_future()
+
+            # This connection will now count towards the limit.
+            self._waiters[key].append(fut)
+
+            if traces:
+                for trace in traces:
+                    await trace.send_connection_queued_start()
+
+            try:
+                await fut
+            except BaseException as e:
+                if key in self._waiters:
+                    # remove a waiter even if it was cancelled, normally it's
+                    # removed when it's notified
+                    try:
+                        self._waiters[key].remove(fut)
+                    except ValueError:  # fut may no longer be in list
+                        pass
+
+                raise e
+            finally:
+                if key in self._waiters and not self._waiters[key]:
+                    del self._waiters[key]
+
+            if traces:
+                for trace in traces:
+                    await trace.send_connection_queued_end()
+
+        proto = self._get(key)
+        if proto is None:
+            placeholder = cast(ResponseHandler, _TransportPlaceholder())
+            self._acquired.add(placeholder)
+            self._acquired_per_host[key].add(placeholder)
+
+            if traces:
+                for trace in traces:
+                    await trace.send_connection_create_start()
+
+            try:
+                proto = await self._create_connection(req, traces, timeout)
+                if self._closed:
+                    proto.close()
+                    raise ClientConnectionError("Connector is closed.")
+            except BaseException:
+                if not self._closed:
+                    self._acquired.remove(placeholder)
+                    self._drop_acquired_per_host(key, placeholder)
+                    self._release_waiter()
+                raise
+            else:
+                if not self._closed:
+                    self._acquired.remove(placeholder)
+                    self._drop_acquired_per_host(key, placeholder)
+
+            if traces:
+                for trace in traces:
+                    await trace.send_connection_create_end()
+        else:
+            if traces:
+                # Acquire the connection to prevent race conditions with limits
+                placeholder = cast(ResponseHandler, _TransportPlaceholder())
+                self._acquired.add(placeholder)
+                self._acquired_per_host[key].add(placeholder)
+                for trace in traces:
+                    await trace.send_connection_reuseconn()
+                self._acquired.remove(placeholder)
+                self._drop_acquired_per_host(key, placeholder)
+
+        self._acquired.add(proto)
+        self._acquired_per_host[key].add(proto)
+        return Connection(self, key, proto, self._loop)
+
+    def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
+        try:
+            conns = self._conns[key]
+        except KeyError:
+            return None
+
+        t1 = self._loop.time()
+        while conns:
+            proto, t0 = conns.pop()
+            if proto.is_connected():
+                if t1 - t0 > self._keepalive_timeout:
+                    transport = proto.transport
+                    proto.close()
+                    # only for SSL transports
+                    if key.is_ssl and not self._cleanup_closed_disabled:
+                        self._cleanup_closed_transports.append(transport)
+                else:
+                    if not conns:
+                        # The very last connection was reclaimed: drop the key
+                        del self._conns[key]
+                    return proto
+            else:
+                transport = proto.transport
+                proto.close()
+                if key.is_ssl and not self._cleanup_closed_disabled:
+                    self._cleanup_closed_transports.append(transport)
+
+        # No more connections: drop the key
+        del self._conns[key]
+        return None
+
+    def _release_waiter(self) -> None:
+        """
+        Iterates over all waiters until one to be released is found.
+
+        The one to be released is not finished and
+        belongs to a host that has available connections.
+        """
+        if not self._waiters:
+            return
+
+        # Having the dict keys ordered this avoids to iterate
+        # at the same order at each call.
+        queues = list(self._waiters.keys())
+        random.shuffle(queues)
+
+        for key in queues:
+            if self._available_connections(key) < 1:
+                continue
+
+            waiters = self._waiters[key]
+            while waiters:
+                waiter = waiters.popleft()
+                if not waiter.done():
+                    waiter.set_result(None)
+                    return
+
+    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
+        if self._closed:
+            # acquired connection is already released on connector closing
+            return
+
+        try:
+            self._acquired.remove(proto)
+            self._drop_acquired_per_host(key, proto)
+        except KeyError:  # pragma: no cover
+            # this may be result of undetermenistic order of objects
+            # finalization due garbage collection.
+            pass
+        else:
+            self._release_waiter()
+
+    def _release(
+        self,
+        key: "ConnectionKey",
+        protocol: ResponseHandler,
+        *,
+        should_close: bool = False,
+    ) -> None:
+        if self._closed:
+            # acquired connection is already released on connector closing
+            return
+
+        self._release_acquired(key, protocol)
+
+        if self._force_close:
+            should_close = True
+
+        if should_close or protocol.should_close:
+            transport = protocol.transport
+            protocol.close()
+
+            if key.is_ssl and not self._cleanup_closed_disabled:
+                self._cleanup_closed_transports.append(transport)
+        else:
+            conns = self._conns.get(key)
+            if conns is None:
+                conns = self._conns[key] = []
+            conns.append((protocol, self._loop.time()))
+
+            if self._cleanup_handle is None:
+                self._cleanup_handle = helpers.weakref_handle(
+                    self,
+                    "_cleanup",
+                    self._keepalive_timeout,
+                    self._loop,
+                    timeout_ceil_threshold=self._timeout_ceil_threshold,
+                )
+
+    async def _create_connection(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> ResponseHandler:
+        raise NotImplementedError()
+
+
+class _DNSCacheTable:
+    def __init__(self, ttl: Optional[float] = None) -> None:
+        self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
+        self._timestamps: Dict[Tuple[str, int], float] = {}
+        self._ttl = ttl
+
+    def __contains__(self, host: object) -> bool:
+        return host in self._addrs_rr
+
+    def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
+        self._addrs_rr[key] = (cycle(addrs), len(addrs))
+
+        if self._ttl is not None:
+            self._timestamps[key] = monotonic()
+
+    def remove(self, key: Tuple[str, int]) -> None:
+        self._addrs_rr.pop(key, None)
+
+        if self._ttl is not None:
+            self._timestamps.pop(key, None)
+
+    def clear(self) -> None:
+        self._addrs_rr.clear()
+        self._timestamps.clear()
+
+    def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
+        loop, length = self._addrs_rr[key]
+        addrs = list(islice(loop, length))
+        # Consume one more element to shift internal state of `cycle`
+        next(loop)
+        return addrs
+
+    def expired(self, key: Tuple[str, int]) -> bool:
+        if self._ttl is None:
+            return False
+
+        return self._timestamps[key] + self._ttl < monotonic()
+
+
+class TCPConnector(BaseConnector):
+    """TCP connector.
+
+    verify_ssl - Set to True to check ssl certifications.
+    fingerprint - Pass the binary sha256
+        digest of the expected certificate in DER format to verify
+        that the certificate the server presents matches. See also
+        https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
+    resolver - Enable DNS lookups and use this
+        resolver
+    use_dns_cache - Use memory cache for DNS lookups.
+    ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
+    family - socket address family
+    local_addr - local tuple of (host, port) to bind socket to
+
+    keepalive_timeout - (optional) Keep-alive timeout.
+    force_close - Set to True to force close and do reconnect
+        after each request (and between redirects).
+    limit - The total number of simultaneous connections.
+    limit_per_host - Number of simultaneous connections to one host.
+    enable_cleanup_closed - Enables clean-up closed ssl transports.
+                            Disabled by default.
+    loop - Optional event loop.
+    """
+
+    def __init__(
+        self,
+        *,
+        verify_ssl: bool = True,
+        fingerprint: Optional[bytes] = None,
+        use_dns_cache: bool = True,
+        ttl_dns_cache: Optional[int] = 10,
+        family: int = 0,
+        ssl_context: Optional[SSLContext] = None,
+        ssl: Union[bool, Fingerprint, SSLContext] = True,
+        local_addr: Optional[Tuple[str, int]] = None,
+        resolver: Optional[AbstractResolver] = None,
+        keepalive_timeout: Union[None, float, object] = sentinel,
+        force_close: bool = False,
+        limit: int = 100,
+        limit_per_host: int = 0,
+        enable_cleanup_closed: bool = False,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        timeout_ceil_threshold: float = 5,
+    ):
+        super().__init__(
+            keepalive_timeout=keepalive_timeout,
+            force_close=force_close,
+            limit=limit,
+            limit_per_host=limit_per_host,
+            enable_cleanup_closed=enable_cleanup_closed,
+            loop=loop,
+            timeout_ceil_threshold=timeout_ceil_threshold,
+        )
+
+        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+        if resolver is None:
+            resolver = DefaultResolver(loop=self._loop)
+        self._resolver = resolver
+
+        self._use_dns_cache = use_dns_cache
+        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
+        self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
+        self._family = family
+        self._local_addr = local_addr
+
+    def close(self) -> Awaitable[None]:
+        """Close all ongoing DNS calls."""
+        for ev in self._throttle_dns_events.values():
+            ev.cancel()
+
+        return super().close()
+
+    @property
+    def family(self) -> int:
+        """Socket family like AF_INET."""
+        return self._family
+
+    @property
+    def use_dns_cache(self) -> bool:
+        """True if local DNS caching is enabled."""
+        return self._use_dns_cache
+
+    def clear_dns_cache(
+        self, host: Optional[str] = None, port: Optional[int] = None
+    ) -> None:
+        """Remove specified host/port or clear all dns local cache."""
+        if host is not None and port is not None:
+            self._cached_hosts.remove((host, port))
+        elif host is not None or port is not None:
+            raise ValueError("either both host and port " "or none of them are allowed")
+        else:
+            self._cached_hosts.clear()
+
+    async def _resolve_host(
+        self, host: str, port: int, traces: Optional[List["Trace"]] = None
+    ) -> List[Dict[str, Any]]:
+        """Resolve host and return list of addresses."""
+        if is_ip_address(host):
+            return [
+                {
+                    "hostname": host,
+                    "host": host,
+                    "port": port,
+                    "family": self._family,
+                    "proto": 0,
+                    "flags": 0,
+                }
+            ]
+
+        if not self._use_dns_cache:
+
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_resolvehost_start(host)
+
+            res = await self._resolver.resolve(host, port, family=self._family)
+
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_resolvehost_end(host)
+
+            return res
+
+        key = (host, port)
+        if key in self._cached_hosts and not self._cached_hosts.expired(key):
+            # get result early, before any await (#4014)
+            result = self._cached_hosts.next_addrs(key)
+
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_cache_hit(host)
+            return result
+
+        #
+        # If multiple connectors are resolving the same host, we wait
+        # for the first one to resolve and then use the result for all of them.
+        # We use a throttle event to ensure that we only resolve the host once
+        # and then use the result for all the waiters.
+        #
+        # In this case we need to create a task to ensure that we can shield
+        # the task from cancellation as cancelling this lookup should not cancel
+        # the underlying lookup or else the cancel event will get broadcast to
+        # all the waiters across all connections.
+        #
+        resolved_host_task = asyncio.create_task(
+            self._resolve_host_with_throttle(key, host, port, traces)
+        )
+        try:
+            return await asyncio.shield(resolved_host_task)
+        except asyncio.CancelledError:
+
+            def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
+                with suppress(Exception, asyncio.CancelledError):
+                    fut.result()
+
+            resolved_host_task.add_done_callback(drop_exception)
+            raise
+
+    async def _resolve_host_with_throttle(
+        self,
+        key: Tuple[str, int],
+        host: str,
+        port: int,
+        traces: Optional[List["Trace"]],
+    ) -> List[Dict[str, Any]]:
+        """Resolve host with a dns events throttle."""
+        if key in self._throttle_dns_events:
+            # get event early, before any await (#4014)
+            event = self._throttle_dns_events[key]
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_cache_hit(host)
+            await event.wait()
+        else:
+            # update dict early, before any await (#4014)
+            self._throttle_dns_events[key] = EventResultOrError(self._loop)
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_cache_miss(host)
+            try:
+
+                if traces:
+                    for trace in traces:
+                        await trace.send_dns_resolvehost_start(host)
+
+                addrs = await self._resolver.resolve(host, port, family=self._family)
+                if traces:
+                    for trace in traces:
+                        await trace.send_dns_resolvehost_end(host)
+
+                self._cached_hosts.add(key, addrs)
+                self._throttle_dns_events[key].set()
+            except BaseException as e:
+                # any DNS exception, independently of the implementation
+                # is set for the waiters to raise the same exception.
+                self._throttle_dns_events[key].set(exc=e)
+                raise
+            finally:
+                self._throttle_dns_events.pop(key)
+
+        return self._cached_hosts.next_addrs(key)
+
+    async def _create_connection(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> ResponseHandler:
+        """Create connection.
+
+        Has same keyword arguments as BaseEventLoop.create_connection.
+        """
+        if req.proxy:
+            _, proto = await self._create_proxy_connection(req, traces, timeout)
+        else:
+            _, proto = await self._create_direct_connection(req, traces, timeout)
+
+        return proto
+
+    @staticmethod
+    @functools.lru_cache(None)
+    def _make_ssl_context(verified: bool) -> SSLContext:
+        if verified:
+            return ssl.create_default_context()
+        else:
+            sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+            sslcontext.options |= ssl.OP_NO_SSLv2
+            sslcontext.options |= ssl.OP_NO_SSLv3
+            sslcontext.check_hostname = False
+            sslcontext.verify_mode = ssl.CERT_NONE
+            try:
+                sslcontext.options |= ssl.OP_NO_COMPRESSION
+            except AttributeError as attr_err:
+                warnings.warn(
+                    "{!s}: The Python interpreter is compiled "
+                    "against OpenSSL < 1.0.0. Ref: "
+                    "https://docs.python.org/3/library/ssl.html"
+                    "#ssl.OP_NO_COMPRESSION".format(attr_err),
+                )
+            sslcontext.set_default_verify_paths()
+            return sslcontext
+
+    def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
+        """Logic to get the correct SSL context
+
+        0. if req.ssl is false, return None
+
+        1. if ssl_context is specified in req, use it
+        2. if _ssl_context is specified in self, use it
+        3. otherwise:
+            1. if verify_ssl is not specified in req, use self.ssl_context
+               (will generate a default context according to self.verify_ssl)
+            2. if verify_ssl is True in req, generate a default SSL context
+            3. if verify_ssl is False in req, generate a SSL context that
+               won't verify
+        """
+        if req.is_ssl():
+            if ssl is None:  # pragma: no cover
+                raise RuntimeError("SSL is not supported.")
+            sslcontext = req.ssl
+            if isinstance(sslcontext, ssl.SSLContext):
+                return sslcontext
+            if sslcontext is not True:
+                # not verified or fingerprinted
+                return self._make_ssl_context(False)
+            sslcontext = self._ssl
+            if isinstance(sslcontext, ssl.SSLContext):
+                return sslcontext
+            if sslcontext is not True:
+                # not verified or fingerprinted
+                return self._make_ssl_context(False)
+            return self._make_ssl_context(True)
+        else:
+            return None
+
+    def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
+        ret = req.ssl
+        if isinstance(ret, Fingerprint):
+            return ret
+        ret = self._ssl
+        if isinstance(ret, Fingerprint):
+            return ret
+        return None
+
+    async def _wrap_create_connection(
+        self,
+        *args: Any,
+        req: ClientRequest,
+        timeout: "ClientTimeout",
+        client_error: Type[Exception] = ClientConnectorError,
+        **kwargs: Any,
+    ) -> Tuple[asyncio.Transport, ResponseHandler]:
+        try:
+            async with ceil_timeout(
+                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+            ):
+                return await self._loop.create_connection(*args, **kwargs)
+        except cert_errors as exc:
+            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+        except ssl_errors as exc:
+            raise ClientConnectorSSLError(req.connection_key, exc) from exc
+        except OSError as exc:
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
+            raise client_error(req.connection_key, exc) from exc
+
+    def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
+        """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
+
+        It is necessary for TLS-in-TLS so that it is possible to
+        send HTTPS queries through HTTPS proxies.
+
+        This doesn't affect regular HTTP requests, though.
+        """
+        if not req.is_ssl():
+            return
+
+        proxy_url = req.proxy
+        assert proxy_url is not None
+        if proxy_url.scheme != "https":
+            return
+
+        self._check_loop_for_start_tls()
+
+    def _check_loop_for_start_tls(self) -> None:
+        try:
+            self._loop.start_tls
+        except AttributeError as attr_exc:
+            raise RuntimeError(
+                "An HTTPS request is being sent through an HTTPS proxy. "
+                "This needs support for TLS in TLS but it is not implemented "
+                "in your runtime for the stdlib asyncio.\n\n"
+                "Please upgrade to Python 3.11 or higher. For more details, "
+                "please see:\n"
+                "* https://bugs.python.org/issue37179\n"
+                "* https://github.com/python/cpython/pull/28073\n"
+                "* https://docs.aiohttp.org/en/stable/"
+                "client_advanced.html#proxy-support\n"
+                "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+            ) from attr_exc
+
+    def _loop_supports_start_tls(self) -> bool:
+        try:
+            self._check_loop_for_start_tls()
+        except RuntimeError:
+            return False
+        else:
+            return True
+
+    def _warn_about_tls_in_tls(
+        self,
+        underlying_transport: asyncio.Transport,
+        req: ClientRequest,
+    ) -> None:
+        """Issue a warning if the requested URL has HTTPS scheme."""
+        if req.request_info.url.scheme != "https":
+            return
+
+        asyncio_supports_tls_in_tls = getattr(
+            underlying_transport,
+            "_start_tls_compatible",
+            False,
+        )
+
+        if asyncio_supports_tls_in_tls:
+            return
+
+        warnings.warn(
+            "An HTTPS request is being sent through an HTTPS proxy. "
+            "This support for TLS in TLS is known to be disabled "
+            "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
+            "an error in the log below.\n\n"
+            "It is possible to enable it via monkeypatching. "
+            "For more details, see:\n"
+            "* https://bugs.python.org/issue37179\n"
+            "* https://github.com/python/cpython/pull/28073\n\n"
+            "You can temporarily patch this as follows:\n"
+            "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
+            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+            RuntimeWarning,
+            source=self,
+            # Why `4`? At least 3 of the calls in the stack originate
+            # from the methods in this class.
+            stacklevel=3,
+        )
+
+    async def _start_tls_connection(
+        self,
+        underlying_transport: asyncio.Transport,
+        req: ClientRequest,
+        timeout: "ClientTimeout",
+        client_error: Type[Exception] = ClientConnectorError,
+    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+        """Wrap the raw TCP transport with TLS."""
+        tls_proto = self._factory()  # Create a brand new proto for TLS
+
+        # Safety of the `cast()` call here is based on the fact that
+        # internally `_get_ssl_context()` only returns `None` when
+        # `req.is_ssl()` evaluates to `False` which is never gonna happen
+        # in this code path. Of course, it's rather fragile
+        # maintainability-wise but this is to be solved separately.
+        sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
+
+        try:
+            async with ceil_timeout(
+                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+            ):
+                try:
+                    tls_transport = await self._loop.start_tls(
+                        underlying_transport,
+                        tls_proto,
+                        sslcontext,
+                        server_hostname=req.server_hostname or req.host,
|
1142 |
+
ssl_handshake_timeout=timeout.total,
|
1143 |
+
)
|
1144 |
+
except BaseException:
|
1145 |
+
# We need to close the underlying transport since
|
1146 |
+
# `start_tls()` probably failed before it had a
|
1147 |
+
# chance to do this:
|
1148 |
+
underlying_transport.close()
|
1149 |
+
raise
|
1150 |
+
except cert_errors as exc:
|
1151 |
+
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
|
1152 |
+
except ssl_errors as exc:
|
1153 |
+
raise ClientConnectorSSLError(req.connection_key, exc) from exc
|
1154 |
+
except OSError as exc:
|
1155 |
+
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
1156 |
+
raise
|
1157 |
+
raise client_error(req.connection_key, exc) from exc
|
1158 |
+
except TypeError as type_err:
|
1159 |
+
# Example cause looks like this:
|
1160 |
+
# TypeError: transport <asyncio.sslproto._SSLProtocolTransport
|
1161 |
+
# object at 0x7f760615e460> is not supported by start_tls()
|
1162 |
+
|
1163 |
+
raise ClientConnectionError(
|
1164 |
+
"Cannot initialize a TLS-in-TLS connection to host "
|
1165 |
+
f"{req.host!s}:{req.port:d} through an underlying connection "
|
1166 |
+
f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
|
1167 |
+
f"[{type_err!s}]"
|
1168 |
+
) from type_err
|
1169 |
+
else:
|
1170 |
+
if tls_transport is None:
|
1171 |
+
msg = "Failed to start TLS (possibly caused by closing transport)"
|
1172 |
+
raise client_error(req.connection_key, OSError(msg))
|
1173 |
+
tls_proto.connection_made(
|
1174 |
+
tls_transport
|
1175 |
+
) # Kick the state machine of the new TLS protocol
|
1176 |
+
|
1177 |
+
return tls_transport, tls_proto
|
1178 |
+
|
1179 |
+
async def _create_direct_connection(
|
1180 |
+
self,
|
1181 |
+
req: ClientRequest,
|
1182 |
+
traces: List["Trace"],
|
1183 |
+
timeout: "ClientTimeout",
|
1184 |
+
*,
|
1185 |
+
client_error: Type[Exception] = ClientConnectorError,
|
1186 |
+
) -> Tuple[asyncio.Transport, ResponseHandler]:
|
1187 |
+
sslcontext = self._get_ssl_context(req)
|
1188 |
+
fingerprint = self._get_fingerprint(req)
|
1189 |
+
|
1190 |
+
host = req.url.raw_host
|
1191 |
+
assert host is not None
|
1192 |
+
# Replace multiple trailing dots with a single one.
|
1193 |
+
# A trailing dot is only present for fully-qualified domain names.
|
1194 |
+
# See https://github.com/aio-libs/aiohttp/pull/7364.
|
1195 |
+
if host.endswith(".."):
|
1196 |
+
host = host.rstrip(".") + "."
|
1197 |
+
port = req.port
|
1198 |
+
assert port is not None
|
1199 |
+
try:
|
1200 |
+
# Cancelling this lookup should not cancel the underlying lookup
|
1201 |
+
# or else the cancel event will get broadcast to all the waiters
|
1202 |
+
# across all connections.
|
1203 |
+
hosts = await self._resolve_host(host, port, traces=traces)
|
1204 |
+
except OSError as exc:
|
1205 |
+
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
1206 |
+
raise
|
1207 |
+
# in case of proxy it is not ClientProxyConnectionError
|
1208 |
+
# it is problem of resolving proxy ip itself
|
1209 |
+
raise ClientConnectorError(req.connection_key, exc) from exc
|
1210 |
+
|
1211 |
+
last_exc: Optional[Exception] = None
|
1212 |
+
|
1213 |
+
for hinfo in hosts:
|
1214 |
+
host = hinfo["host"]
|
1215 |
+
port = hinfo["port"]
|
1216 |
+
|
1217 |
+
# Strip trailing dots, certificates contain FQDN without dots.
|
1218 |
+
# See https://github.com/aio-libs/aiohttp/issues/3636
|
1219 |
+
server_hostname = (
|
1220 |
+
(req.server_hostname or hinfo["hostname"]).rstrip(".")
|
1221 |
+
if sslcontext
|
1222 |
+
else None
|
1223 |
+
)
|
1224 |
+
|
1225 |
+
try:
|
1226 |
+
transp, proto = await self._wrap_create_connection(
|
1227 |
+
self._factory,
|
1228 |
+
host,
|
1229 |
+
port,
|
1230 |
+
timeout=timeout,
|
1231 |
+
ssl=sslcontext,
|
1232 |
+
family=hinfo["family"],
|
1233 |
+
proto=hinfo["proto"],
|
1234 |
+
flags=hinfo["flags"],
|
1235 |
+
server_hostname=server_hostname,
|
1236 |
+
local_addr=self._local_addr,
|
1237 |
+
req=req,
|
1238 |
+
client_error=client_error,
|
1239 |
+
)
|
1240 |
+
except ClientConnectorError as exc:
|
1241 |
+
last_exc = exc
|
1242 |
+
continue
|
1243 |
+
|
1244 |
+
if req.is_ssl() and fingerprint:
|
1245 |
+
try:
|
1246 |
+
fingerprint.check(transp)
|
1247 |
+
except ServerFingerprintMismatch as exc:
|
1248 |
+
transp.close()
|
1249 |
+
if not self._cleanup_closed_disabled:
|
1250 |
+
self._cleanup_closed_transports.append(transp)
|
1251 |
+
last_exc = exc
|
1252 |
+
continue
|
1253 |
+
|
1254 |
+
return transp, proto
|
1255 |
+
else:
|
1256 |
+
assert last_exc is not None
|
1257 |
+
raise last_exc
|
1258 |
+
|
1259 |
+
async def _create_proxy_connection(
|
1260 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
1261 |
+
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
|
1262 |
+
self._fail_on_no_start_tls(req)
|
1263 |
+
runtime_has_start_tls = self._loop_supports_start_tls()
|
1264 |
+
|
1265 |
+
headers: Dict[str, str] = {}
|
1266 |
+
if req.proxy_headers is not None:
|
1267 |
+
headers = req.proxy_headers # type: ignore[assignment]
|
1268 |
+
headers[hdrs.HOST] = req.headers[hdrs.HOST]
|
1269 |
+
|
1270 |
+
url = req.proxy
|
1271 |
+
assert url is not None
|
1272 |
+
proxy_req = ClientRequest(
|
1273 |
+
hdrs.METH_GET,
|
1274 |
+
url,
|
1275 |
+
headers=headers,
|
1276 |
+
auth=req.proxy_auth,
|
1277 |
+
loop=self._loop,
|
1278 |
+
ssl=req.ssl,
|
1279 |
+
)
|
1280 |
+
|
1281 |
+
# create connection to proxy server
|
1282 |
+
transport, proto = await self._create_direct_connection(
|
1283 |
+
proxy_req, [], timeout, client_error=ClientProxyConnectionError
|
1284 |
+
)
|
1285 |
+
|
1286 |
+
# Many HTTP proxies has buggy keepalive support. Let's not
|
1287 |
+
# reuse connection but close it after processing every
|
1288 |
+
# response.
|
1289 |
+
proto.force_close()
|
1290 |
+
|
1291 |
+
auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
|
1292 |
+
if auth is not None:
|
1293 |
+
if not req.is_ssl():
|
1294 |
+
req.headers[hdrs.PROXY_AUTHORIZATION] = auth
|
1295 |
+
else:
|
1296 |
+
proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
|
1297 |
+
|
1298 |
+
if req.is_ssl():
|
1299 |
+
if runtime_has_start_tls:
|
1300 |
+
self._warn_about_tls_in_tls(transport, req)
|
1301 |
+
|
1302 |
+
# For HTTPS requests over HTTP proxy
|
1303 |
+
# we must notify proxy to tunnel connection
|
1304 |
+
# so we send CONNECT command:
|
1305 |
+
# CONNECT www.python.org:443 HTTP/1.1
|
1306 |
+
# Host: www.python.org
|
1307 |
+
#
|
1308 |
+
# next we must do TLS handshake and so on
|
1309 |
+
# to do this we must wrap raw socket into secure one
|
1310 |
+
# asyncio handles this perfectly
|
1311 |
+
proxy_req.method = hdrs.METH_CONNECT
|
1312 |
+
proxy_req.url = req.url
|
1313 |
+
key = attr.evolve(
|
1314 |
+
req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
|
1315 |
+
)
|
1316 |
+
conn = Connection(self, key, proto, self._loop)
|
1317 |
+
proxy_resp = await proxy_req.send(conn)
|
1318 |
+
try:
|
1319 |
+
protocol = conn._protocol
|
1320 |
+
assert protocol is not None
|
1321 |
+
|
1322 |
+
# read_until_eof=True will ensure the connection isn't closed
|
1323 |
+
# once the response is received and processed allowing
|
1324 |
+
# START_TLS to work on the connection below.
|
1325 |
+
protocol.set_response_params(
|
1326 |
+
read_until_eof=runtime_has_start_tls,
|
1327 |
+
timeout_ceil_threshold=self._timeout_ceil_threshold,
|
1328 |
+
)
|
1329 |
+
resp = await proxy_resp.start(conn)
|
1330 |
+
except BaseException:
|
1331 |
+
proxy_resp.close()
|
1332 |
+
conn.close()
|
1333 |
+
raise
|
1334 |
+
else:
|
1335 |
+
conn._protocol = None
|
1336 |
+
conn._transport = None
|
1337 |
+
try:
|
1338 |
+
if resp.status != 200:
|
1339 |
+
message = resp.reason
|
1340 |
+
if message is None:
|
1341 |
+
message = HTTPStatus(resp.status).phrase
|
1342 |
+
raise ClientHttpProxyError(
|
1343 |
+
proxy_resp.request_info,
|
1344 |
+
resp.history,
|
1345 |
+
status=resp.status,
|
1346 |
+
message=message,
|
1347 |
+
headers=resp.headers,
|
1348 |
+
)
|
1349 |
+
if not runtime_has_start_tls:
|
1350 |
+
rawsock = transport.get_extra_info("socket", default=None)
|
1351 |
+
if rawsock is None:
|
1352 |
+
raise RuntimeError(
|
1353 |
+
"Transport does not expose socket instance"
|
1354 |
+
)
|
1355 |
+
# Duplicate the socket, so now we can close proxy transport
|
1356 |
+
rawsock = rawsock.dup()
|
1357 |
+
except BaseException:
|
1358 |
+
# It shouldn't be closed in `finally` because it's fed to
|
1359 |
+
# `loop.start_tls()` and the docs say not to touch it after
|
1360 |
+
# passing there.
|
1361 |
+
transport.close()
|
1362 |
+
raise
|
1363 |
+
finally:
|
1364 |
+
if not runtime_has_start_tls:
|
1365 |
+
transport.close()
|
1366 |
+
|
1367 |
+
if not runtime_has_start_tls:
|
1368 |
+
# HTTP proxy with support for upgrade to HTTPS
|
1369 |
+
sslcontext = self._get_ssl_context(req)
|
1370 |
+
return await self._wrap_create_connection(
|
1371 |
+
self._factory,
|
1372 |
+
timeout=timeout,
|
1373 |
+
ssl=sslcontext,
|
1374 |
+
sock=rawsock,
|
1375 |
+
server_hostname=req.host,
|
1376 |
+
req=req,
|
1377 |
+
)
|
1378 |
+
|
1379 |
+
return await self._start_tls_connection(
|
1380 |
+
# Access the old transport for the last time before it's
|
1381 |
+
# closed and forgotten forever:
|
1382 |
+
transport,
|
1383 |
+
req=req,
|
1384 |
+
timeout=timeout,
|
1385 |
+
)
|
1386 |
+
finally:
|
1387 |
+
proxy_resp.close()
|
1388 |
+
|
1389 |
+
return transport, proto
|
1390 |
+
|
1391 |
+
|
1392 |
+
class UnixConnector(BaseConnector):
|
1393 |
+
"""Unix socket connector.
|
1394 |
+
|
1395 |
+
path - Unix socket path.
|
1396 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
1397 |
+
force_close - Set to True to force close and do reconnect
|
1398 |
+
after each request (and between redirects).
|
1399 |
+
limit - The total number of simultaneous connections.
|
1400 |
+
limit_per_host - Number of simultaneous connections to one host.
|
1401 |
+
loop - Optional event loop.
|
1402 |
+
"""
|
1403 |
+
|
1404 |
+
def __init__(
|
1405 |
+
self,
|
1406 |
+
path: str,
|
1407 |
+
force_close: bool = False,
|
1408 |
+
keepalive_timeout: Union[object, float, None] = sentinel,
|
1409 |
+
limit: int = 100,
|
1410 |
+
limit_per_host: int = 0,
|
1411 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
1412 |
+
) -> None:
|
1413 |
+
super().__init__(
|
1414 |
+
force_close=force_close,
|
1415 |
+
keepalive_timeout=keepalive_timeout,
|
1416 |
+
limit=limit,
|
1417 |
+
limit_per_host=limit_per_host,
|
1418 |
+
loop=loop,
|
1419 |
+
)
|
1420 |
+
self._path = path
|
1421 |
+
|
1422 |
+
@property
|
1423 |
+
def path(self) -> str:
|
1424 |
+
"""Path to unix socket."""
|
1425 |
+
return self._path
|
1426 |
+
|
1427 |
+
async def _create_connection(
|
1428 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
1429 |
+
) -> ResponseHandler:
|
1430 |
+
try:
|
1431 |
+
async with ceil_timeout(
|
1432 |
+
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
|
1433 |
+
):
|
1434 |
+
_, proto = await self._loop.create_unix_connection(
|
1435 |
+
self._factory, self._path
|
1436 |
+
)
|
1437 |
+
except OSError as exc:
|
1438 |
+
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
1439 |
+
raise
|
1440 |
+
raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
|
1441 |
+
|
1442 |
+
return proto
|
1443 |
+
|
1444 |
+
|
1445 |
+
class NamedPipeConnector(BaseConnector):
|
1446 |
+
"""Named pipe connector.
|
1447 |
+
|
1448 |
+
Only supported by the proactor event loop.
|
1449 |
+
See also: https://docs.python.org/3/library/asyncio-eventloop.html
|
1450 |
+
|
1451 |
+
path - Windows named pipe path.
|
1452 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
1453 |
+
force_close - Set to True to force close and do reconnect
|
1454 |
+
after each request (and between redirects).
|
1455 |
+
limit - The total number of simultaneous connections.
|
1456 |
+
limit_per_host - Number of simultaneous connections to one host.
|
1457 |
+
loop - Optional event loop.
|
1458 |
+
"""
|
1459 |
+
|
1460 |
+
def __init__(
|
1461 |
+
self,
|
1462 |
+
path: str,
|
1463 |
+
force_close: bool = False,
|
1464 |
+
keepalive_timeout: Union[object, float, None] = sentinel,
|
1465 |
+
limit: int = 100,
|
1466 |
+
limit_per_host: int = 0,
|
1467 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
1468 |
+
) -> None:
|
1469 |
+
super().__init__(
|
1470 |
+
force_close=force_close,
|
1471 |
+
keepalive_timeout=keepalive_timeout,
|
1472 |
+
limit=limit,
|
1473 |
+
limit_per_host=limit_per_host,
|
1474 |
+
loop=loop,
|
1475 |
+
)
|
1476 |
+
if not isinstance(
|
1477 |
+
self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
|
1478 |
+
):
|
1479 |
+
raise RuntimeError(
|
1480 |
+
"Named Pipes only available in proactor " "loop under windows"
|
1481 |
+
)
|
1482 |
+
self._path = path
|
1483 |
+
|
1484 |
+
@property
|
1485 |
+
def path(self) -> str:
|
1486 |
+
"""Path to the named pipe."""
|
1487 |
+
return self._path
|
1488 |
+
|
1489 |
+
async def _create_connection(
|
1490 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
1491 |
+
) -> ResponseHandler:
|
1492 |
+
try:
|
1493 |
+
async with ceil_timeout(
|
1494 |
+
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
|
1495 |
+
):
|
1496 |
+
_, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
|
1497 |
+
self._factory, self._path
|
1498 |
+
)
|
1499 |
+
# the drain is required so that the connection_made is called
|
1500 |
+
# and transport is set otherwise it is not set before the
|
1501 |
+
# `assert conn.transport is not None`
|
1502 |
+
# in client.py's _request method
|
1503 |
+
await asyncio.sleep(0)
|
1504 |
+
# other option is to manually set transport like
|
1505 |
+
# `proto.transport = trans`
|
1506 |
+
except OSError as exc:
|
1507 |
+
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
1508 |
+
raise
|
1509 |
+
raise ClientConnectorError(req.connection_key, exc) from exc
|
1510 |
+
|
1511 |
+
return cast(ResponseHandler, proto)
|
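
The connector machinery above is rarely called directly; it is driven through the public ClientSession API. A minimal usage sketch (not part of the diff; the proxy URL is a placeholder) of the path that exercises _create_proxy_connection() and, on runtimes with loop.start_tls(), _start_tls_connection():

    import asyncio

    import aiohttp


    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # Fetching an HTTPS URL through an HTTPS proxy sends a CONNECT
            # request to the proxy and then wraps the tunnel with TLS.
            async with session.get(
                "https://example.com",
                proxy="https://proxy.example.com:8443",  # placeholder proxy
            ) as resp:
                print(resp.status)


    asyncio.run(main())
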
venv/lib/python3.10/site-packages/aiohttp/formdata.py
ADDED
@@ -0,0 +1,182 @@
+import io
+import warnings
+from typing import Any, Iterable, List, Optional
+from urllib.parse import urlencode
+
+from multidict import MultiDict, MultiDictProxy
+
+from . import hdrs, multipart, payload
+from .helpers import guess_filename
+from .payload import Payload
+
+__all__ = ("FormData",)
+
+
+class FormData:
+    """Helper class for form body generation.
+
+    Supports multipart/form-data and application/x-www-form-urlencoded.
+    """
+
+    def __init__(
+        self,
+        fields: Iterable[Any] = (),
+        quote_fields: bool = True,
+        charset: Optional[str] = None,
+    ) -> None:
+        self._writer = multipart.MultipartWriter("form-data")
+        self._fields: List[Any] = []
+        self._is_multipart = False
+        self._is_processed = False
+        self._quote_fields = quote_fields
+        self._charset = charset
+
+        if isinstance(fields, dict):
+            fields = list(fields.items())
+        elif not isinstance(fields, (list, tuple)):
+            fields = (fields,)
+        self.add_fields(*fields)
+
+    @property
+    def is_multipart(self) -> bool:
+        return self._is_multipart
+
+    def add_field(
+        self,
+        name: str,
+        value: Any,
+        *,
+        content_type: Optional[str] = None,
+        filename: Optional[str] = None,
+        content_transfer_encoding: Optional[str] = None,
+    ) -> None:
+
+        if isinstance(value, io.IOBase):
+            self._is_multipart = True
+        elif isinstance(value, (bytes, bytearray, memoryview)):
+            msg = (
+                "In v4, passing bytes will no longer create a file field. "
+                "Please explicitly use the filename parameter or pass a BytesIO object."
+            )
+            if filename is None and content_transfer_encoding is None:
+                warnings.warn(msg, DeprecationWarning)
+                filename = name
+
+        type_options: MultiDict[str] = MultiDict({"name": name})
+        if filename is not None and not isinstance(filename, str):
+            raise TypeError(
+                "filename must be an instance of str. Got: %s" % filename
+            )
+        if filename is None and isinstance(value, io.IOBase):
+            filename = guess_filename(value, name)
+        if filename is not None:
+            type_options["filename"] = filename
+            self._is_multipart = True
+
+        headers = {}
+        if content_type is not None:
+            if not isinstance(content_type, str):
+                raise TypeError(
+                    "content_type must be an instance of str. Got: %s" % content_type
+                )
+            headers[hdrs.CONTENT_TYPE] = content_type
+            self._is_multipart = True
+        if content_transfer_encoding is not None:
+            if not isinstance(content_transfer_encoding, str):
+                raise TypeError(
+                    "content_transfer_encoding must be an instance"
+                    " of str. Got: %s" % content_transfer_encoding
+                )
+            msg = (
+                "content_transfer_encoding is deprecated. "
+                "To maintain compatibility with v4 please pass a BytesPayload."
+            )
+            warnings.warn(msg, DeprecationWarning)
+            self._is_multipart = True
+
+        self._fields.append((type_options, headers, value))
+
+    def add_fields(self, *fields: Any) -> None:
+        to_add = list(fields)
+
+        while to_add:
+            rec = to_add.pop(0)
+
+            if isinstance(rec, io.IOBase):
+                k = guess_filename(rec, "unknown")
+                self.add_field(k, rec)  # type: ignore[arg-type]
+
+            elif isinstance(rec, (MultiDictProxy, MultiDict)):
+                to_add.extend(rec.items())
+
+            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
+                k, fp = rec
+                self.add_field(k, fp)  # type: ignore[arg-type]
+
+            else:
+                raise TypeError(
+                    "Only io.IOBase, multidict and (name, file) "
+                    "pairs allowed, use .add_field() for passing "
+                    "more complex parameters, got {!r}".format(rec)
+                )
+
+    def _gen_form_urlencoded(self) -> payload.BytesPayload:
+        # form data (x-www-form-urlencoded)
+        data = []
+        for type_options, _, value in self._fields:
+            data.append((type_options["name"], value))
+
+        charset = self._charset if self._charset is not None else "utf-8"
+
+        if charset == "utf-8":
+            content_type = "application/x-www-form-urlencoded"
+        else:
+            content_type = "application/x-www-form-urlencoded; charset=%s" % charset
+
+        return payload.BytesPayload(
+            urlencode(data, doseq=True, encoding=charset).encode(),
+            content_type=content_type,
+        )
+
+    def _gen_form_data(self) -> multipart.MultipartWriter:
+        """Encode a list of fields using the multipart/form-data MIME format"""
+        if self._is_processed:
+            raise RuntimeError("Form data has been processed already")
+        for dispparams, headers, value in self._fields:
+            try:
+                if hdrs.CONTENT_TYPE in headers:
+                    part = payload.get_payload(
+                        value,
+                        content_type=headers[hdrs.CONTENT_TYPE],
+                        headers=headers,
+                        encoding=self._charset,
+                    )
+                else:
+                    part = payload.get_payload(
+                        value, headers=headers, encoding=self._charset
+                    )
+            except Exception as exc:
+                raise TypeError(
+                    "Can not serialize value type: %r\n "
+                    "headers: %r\n value: %r" % (type(value), headers, value)
+                ) from exc
+
+            if dispparams:
+                part.set_content_disposition(
+                    "form-data", quote_fields=self._quote_fields, **dispparams
+                )
+                # FIXME cgi.FieldStorage doesn't like body parts with
+                # Content-Length which were sent via chunked transfer encoding
+                assert part.headers is not None
+                part.headers.popall(hdrs.CONTENT_LENGTH, None)
+
+            self._writer.append_payload(part)
+
+        self._is_processed = True
+        return self._writer
+
+    def __call__(self) -> Payload:
+        if self._is_multipart:
+            return self._gen_form_data()
+        else:
+            return self._gen_form_urlencoded()
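
A short usage sketch for the class above (not part of the diff; the field names, URL, and file path are illustrative). Adding an io.IOBase value or a filename flips the body to multipart/form-data; plain string fields alone stay urlencoded:

    import aiohttp


    async def upload(session: aiohttp.ClientSession) -> None:
        form = aiohttp.FormData()
        form.add_field("user", "alice")  # str value alone stays urlencoded
        form.add_field(
            "report",
            open("report.csv", "rb"),  # io.IOBase value switches to multipart
            filename="report.csv",
            content_type="text/csv",
        )
        # ClientSession invokes form() internally; with a file field present
        # that dispatches to _gen_form_data() above.
        async with session.post("https://example.com/upload", data=form) as resp:
            resp.raise_for_status()
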
venv/lib/python3.10/site-packages/aiohttp/http_exceptions.py
ADDED
@@ -0,0 +1,106 @@
+"""Low-level http related exceptions."""
+
+
+from textwrap import indent
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
+class HttpProcessingError(Exception):
+    """HTTP error.
+
+    Shortcut for raising HTTP errors with custom code, message and headers.
+
+    code: HTTP Error code.
+    message: (optional) Error message.
+    headers: (optional) Headers to be sent in response, a list of pairs
+    """
+
+    code = 0
+    message = ""
+    headers = None
+
+    def __init__(
+        self,
+        *,
+        code: Optional[int] = None,
+        message: str = "",
+        headers: Optional[_CIMultiDict] = None,
+    ) -> None:
+        if code is not None:
+            self.code = code
+        self.headers = headers
+        self.message = message
+
+    def __str__(self) -> str:
+        msg = indent(self.message, "  ")
+        return f"{self.code}, message:\n{msg}"
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
+
+
+class BadHttpMessage(HttpProcessingError):
+
+    code = 400
+    message = "Bad Request"
+
+    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
+        super().__init__(message=message, headers=headers)
+        self.args = (message,)
+
+
+class HttpBadRequest(BadHttpMessage):
+
+    code = 400
+    message = "Bad Request"
+
+
+class PayloadEncodingError(BadHttpMessage):
+    """Base class for payload errors"""
+
+
+class ContentEncodingError(PayloadEncodingError):
+    """Content encoding error."""
+
+
+class TransferEncodingError(PayloadEncodingError):
+    """transfer encoding error."""
+
+
+class ContentLengthError(PayloadEncodingError):
+    """Not enough data to satisfy content length header."""
+
+
+class LineTooLong(BadHttpMessage):
+    def __init__(
+        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
+    ) -> None:
+        super().__init__(
+            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
+        )
+        self.args = (line, limit, actual_size)
+
+
+class InvalidHeader(BadHttpMessage):
+    def __init__(self, hdr: Union[bytes, str]) -> None:
+        hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
+        super().__init__(f"Invalid HTTP header: {hdr!r}")
+        self.hdr = hdr_s
+        self.args = (hdr,)
+
+
+class BadStatusLine(BadHttpMessage):
+    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+        if not isinstance(line, str):
+            line = repr(line)
+        super().__init__(error or f"Bad status line {line!r}")
+        self.args = (line,)
+        self.line = line
+
+
+class InvalidURLError(BadHttpMessage):
+    pass
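
These exceptions carry a class-level status code plus an instance message; a small sketch (not part of the diff; the argument values are illustrative) of how they render:

    from aiohttp.http_exceptions import BadStatusLine, LineTooLong

    exc = LineTooLong("request header", limit="8190", actual_size="12000")
    # __str__ prints the code, then the message indented by two spaces:
    # 400, message:
    #   Got more than 8190 bytes (12000) when reading request header.
    print(exc)

    # BadStatusLine embeds the offending line in its message.
    print(repr(BadStatusLine("HTP/1.1 200 OK")))
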
venv/lib/python3.10/site-packages/aiohttp/payload.py
ADDED
@@ -0,0 +1,463 @@
+import asyncio
+import enum
+import io
+import json
+import mimetypes
+import os
+import warnings
+from abc import ABC, abstractmethod
+from itertools import chain
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    ByteString,
+    Dict,
+    Final,
+    Iterable,
+    Optional,
+    TextIO,
+    Tuple,
+    Type,
+    Union,
+)
+
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+    _SENTINEL,
+    content_disposition_header,
+    guess_filename,
+    parse_mimetype,
+    sentinel,
+)
+from .streams import StreamReader
+from .typedefs import JSONEncoder, _CIMultiDict
+
+__all__ = (
+    "PAYLOAD_REGISTRY",
+    "get_payload",
+    "payload_type",
+    "Payload",
+    "BytesPayload",
+    "StringPayload",
+    "IOBasePayload",
+    "BytesIOPayload",
+    "BufferedReaderPayload",
+    "TextIOPayload",
+    "StringIOPayload",
+    "JsonPayload",
+    "AsyncIterablePayload",
+)
+
+TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB
+
+if TYPE_CHECKING:
+    from typing import List
+
+
+class LookupError(Exception):
+    pass
+
+
+class Order(str, enum.Enum):
+    normal = "normal"
+    try_first = "try_first"
+    try_last = "try_last"
+
+
+def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
+    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
+
+
+def register_payload(
+    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
+) -> None:
+    PAYLOAD_REGISTRY.register(factory, type, order=order)
+
+
+class payload_type:
+    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
+        self.type = type
+        self.order = order
+
+    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
+        register_payload(factory, self.type, order=self.order)
+        return factory
+
+
+PayloadType = Type["Payload"]
+_PayloadRegistryItem = Tuple[PayloadType, Any]
+
+
+class PayloadRegistry:
+    """Payload registry.
+
+    note: we need zope.interface for more efficient adapter search
+    """
+
+    def __init__(self) -> None:
+        self._first: List[_PayloadRegistryItem] = []
+        self._normal: List[_PayloadRegistryItem] = []
+        self._last: List[_PayloadRegistryItem] = []
+
+    def get(
+        self,
+        data: Any,
+        *args: Any,
+        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
+        **kwargs: Any,
+    ) -> "Payload":
+        if isinstance(data, Payload):
+            return data
+        for factory, type in _CHAIN(self._first, self._normal, self._last):
+            if isinstance(data, type):
+                return factory(data, *args, **kwargs)
+
+        raise LookupError()
+
+    def register(
+        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
+    ) -> None:
+        if order is Order.try_first:
+            self._first.append((factory, type))
+        elif order is Order.normal:
+            self._normal.append((factory, type))
+        elif order is Order.try_last:
+            self._last.append((factory, type))
+        else:
+            raise ValueError(f"Unsupported order {order!r}")
+
+
+class Payload(ABC):
+
+    _default_content_type: str = "application/octet-stream"
+    _size: Optional[int] = None
+
+    def __init__(
+        self,
+        value: Any,
+        headers: Optional[
+            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
+        ] = None,
+        content_type: Union[str, None, _SENTINEL] = sentinel,
+        filename: Optional[str] = None,
+        encoding: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._encoding = encoding
+        self._filename = filename
+        self._headers: _CIMultiDict = CIMultiDict()
+        self._value = value
+        if content_type is not sentinel and content_type is not None:
+            self._headers[hdrs.CONTENT_TYPE] = content_type
+        elif self._filename is not None:
+            content_type = mimetypes.guess_type(self._filename)[0]
+            if content_type is None:
+                content_type = self._default_content_type
+            self._headers[hdrs.CONTENT_TYPE] = content_type
+        else:
+            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
+        self._headers.update(headers or {})
+
+    @property
+    def size(self) -> Optional[int]:
+        """Size of the payload."""
+        return self._size
+
+    @property
+    def filename(self) -> Optional[str]:
+        """Filename of the payload."""
+        return self._filename
+
+    @property
+    def headers(self) -> _CIMultiDict:
+        """Custom item headers"""
+        return self._headers
+
+    @property
+    def _binary_headers(self) -> bytes:
+        return (
+            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
+                "utf-8"
+            )
+            + b"\r\n"
+        )
+
+    @property
+    def encoding(self) -> Optional[str]:
+        """Payload encoding"""
+        return self._encoding
+
+    @property
+    def content_type(self) -> str:
+        """Content type"""
+        return self._headers[hdrs.CONTENT_TYPE]
+
+    def set_content_disposition(
+        self,
+        disptype: str,
+        quote_fields: bool = True,
+        _charset: str = "utf-8",
+        **params: Any,
+    ) -> None:
+        """Sets ``Content-Disposition`` header."""
+        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
+            disptype, quote_fields=quote_fields, _charset=_charset, **params
+        )
+
+    @abstractmethod
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        """Write payload.
+
+        writer is an AbstractStreamWriter instance:
+        """
+
+
+class BytesPayload(Payload):
+    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
+        if not isinstance(value, (bytes, bytearray, memoryview)):
+            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
+
+        if "content_type" not in kwargs:
+            kwargs["content_type"] = "application/octet-stream"
+
+        super().__init__(value, *args, **kwargs)
+
+        if isinstance(value, memoryview):
+            self._size = value.nbytes
+        else:
+            self._size = len(value)
+
+        if self._size > TOO_LARGE_BYTES_BODY:
+            kwargs = {"source": self}
+            warnings.warn(
+                "Sending a large body directly with raw bytes might"
+                " lock the event loop. You should probably pass an "
+                "io.BytesIO object instead",
+                ResourceWarning,
+                **kwargs,
+            )
+
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        await writer.write(self._value)
+
+
+class StringPayload(BytesPayload):
+    def __init__(
+        self,
+        value: str,
+        *args: Any,
+        encoding: Optional[str] = None,
+        content_type: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+
+        if encoding is None:
+            if content_type is None:
+                real_encoding = "utf-8"
+                content_type = "text/plain; charset=utf-8"
+            else:
+                mimetype = parse_mimetype(content_type)
+                real_encoding = mimetype.parameters.get("charset", "utf-8")
+        else:
+            if content_type is None:
+                content_type = "text/plain; charset=%s" % encoding
+            real_encoding = encoding
+
+        super().__init__(
+            value.encode(real_encoding),
+            encoding=real_encoding,
+            content_type=content_type,
+            *args,
+            **kwargs,
+        )
+
+
+class StringIOPayload(StringPayload):
+    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
+        super().__init__(value.read(), *args, **kwargs)
+
+
+class IOBasePayload(Payload):
+    _value: IO[Any]
+
+    def __init__(
+        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
+    ) -> None:
+        if "filename" not in kwargs:
+            kwargs["filename"] = guess_filename(value)
+
+        super().__init__(value, *args, **kwargs)
+
+        if self._filename is not None and disposition is not None:
+            if hdrs.CONTENT_DISPOSITION not in self.headers:
+                self.set_content_disposition(disposition, filename=self._filename)
+
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        loop = asyncio.get_event_loop()
+        try:
+            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
+            while chunk:
+                await writer.write(chunk)
+                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
+        finally:
+            await loop.run_in_executor(None, self._value.close)
+
+
+class TextIOPayload(IOBasePayload):
+    _value: TextIO
+
+    def __init__(
+        self,
+        value: TextIO,
+        *args: Any,
+        encoding: Optional[str] = None,
+        content_type: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+
+        if encoding is None:
+            if content_type is None:
+                encoding = "utf-8"
+                content_type = "text/plain; charset=utf-8"
+            else:
+                mimetype = parse_mimetype(content_type)
+                encoding = mimetype.parameters.get("charset", "utf-8")
+        else:
+            if content_type is None:
+                content_type = "text/plain; charset=%s" % encoding
+
+        super().__init__(
+            value,
+            content_type=content_type,
+            encoding=encoding,
+            *args,
+            **kwargs,
+        )
+
+    @property
+    def size(self) -> Optional[int]:
+        try:
+            return os.fstat(self._value.fileno()).st_size - self._value.tell()
+        except OSError:
+            return None
+
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        loop = asyncio.get_event_loop()
+        try:
+            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
+            while chunk:
+                data = (
+                    chunk.encode(encoding=self._encoding)
+                    if self._encoding
+                    else chunk.encode()
+                )
+                await writer.write(data)
+                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
+        finally:
+            await loop.run_in_executor(None, self._value.close)
+
+
+class BytesIOPayload(IOBasePayload):
+    @property
+    def size(self) -> int:
+        position = self._value.tell()
+        end = self._value.seek(0, os.SEEK_END)
+        self._value.seek(position)
+        return end - position
+
+
+class BufferedReaderPayload(IOBasePayload):
+    @property
+    def size(self) -> Optional[int]:
+        try:
+            return os.fstat(self._value.fileno()).st_size - self._value.tell()
+        except OSError:
+            # data.fileno() is not supported, e.g.
+            # io.BufferedReader(io.BytesIO(b'data'))
+            return None
+
+
+class JsonPayload(BytesPayload):
+    def __init__(
+        self,
+        value: Any,
+        encoding: str = "utf-8",
+        content_type: str = "application/json",
+        dumps: JSONEncoder = json.dumps,
+        *args: Any,
+        **kwargs: Any,
+    ) -> None:
+
+        super().__init__(
+            dumps(value).encode(encoding),
+            content_type=content_type,
+            encoding=encoding,
+            *args,
+            **kwargs,
+        )
+
+
+if TYPE_CHECKING:
+    from typing import AsyncIterable, AsyncIterator
+
+    _AsyncIterator = AsyncIterator[bytes]
+    _AsyncIterable = AsyncIterable[bytes]
+else:
+    from collections.abc import AsyncIterable, AsyncIterator
+
+    _AsyncIterator = AsyncIterator
+    _AsyncIterable = AsyncIterable
+
+
+class AsyncIterablePayload(Payload):
+
+    _iter: Optional[_AsyncIterator] = None
+
+    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
+        if not isinstance(value, AsyncIterable):
+            raise TypeError(
+                "value argument must support "
+                "collections.abc.AsyncIterable interface, "
+                "got {!r}".format(type(value))
+            )
+
+        if "content_type" not in kwargs:
+            kwargs["content_type"] = "application/octet-stream"
+
+        super().__init__(value, *args, **kwargs)
+
+        self._iter = value.__aiter__()
+
+    async def write(self, writer: AbstractStreamWriter) -> None:
+        if self._iter:
+            try:
+                # iter is not None check prevents rare cases
+                # when the same iterable is used twice
+                while True:
+                    chunk = await self._iter.__anext__()
+                    await writer.write(chunk)
+            except StopAsyncIteration:
+                self._iter = None
+
+
+class StreamReaderPayload(AsyncIterablePayload):
+    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
+        super().__init__(value.iter_any(), *args, **kwargs)
+
+
+PAYLOAD_REGISTRY = PayloadRegistry()
+PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
+PAYLOAD_REGISTRY.register(StringPayload, str)
+PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
+PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
+PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
+PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
+PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
+PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
+# try_last for giving a chance to more specialized async iterables like
+# multidict.BodyPartReaderPayload override the default
+PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
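
The registry at the bottom dispatches on the value's runtime type, checking the try_first bucket, then normal, then try_last. A sketch of the dispatch and of hooking a custom type in via the payload_type decorator (the DictPayload adapter is hypothetical, not part of aiohttp):

    import io

    from aiohttp import payload

    print(type(payload.get_payload(b"raw bytes")).__name__)       # BytesPayload
    print(type(payload.get_payload(io.StringIO("hi"))).__name__)  # StringIOPayload


    @payload.payload_type(dict, order=payload.Order.try_first)
    class DictPayload(payload.JsonPayload):
        """Hypothetical adapter: serialize plain dicts as JSON payloads."""


    p = payload.get_payload({"a": 1})
    print(type(p).__name__, p.content_type)  # DictPayload application/json
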
venv/lib/python3.10/site-packages/aiohttp/pytest_plugin.py
ADDED
@@ -0,0 +1,381 @@
+import asyncio
+import contextlib
+import warnings
+from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union
+
+import pytest
+
+from aiohttp.helpers import isasyncgenfunction
+from aiohttp.web import Application
+
+from .test_utils import (
+    BaseTestServer,
+    RawTestServer,
+    TestClient,
+    TestServer,
+    loop_context,
+    setup_test_loop,
+    teardown_test_loop,
+    unused_port as _unused_port,
+)
+
+try:
+    import uvloop
+except ImportError:  # pragma: no cover
+    uvloop = None  # type: ignore[assignment]
+
+AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
+AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]]
+AiohttpServer = Callable[[Application], Awaitable[TestServer]]
+
+
+def pytest_addoption(parser):  # type: ignore[no-untyped-def]
+    parser.addoption(
+        "--aiohttp-fast",
+        action="store_true",
+        default=False,
+        help="run tests faster by disabling extra checks",
+    )
+    parser.addoption(
+        "--aiohttp-loop",
+        action="store",
+        default="pyloop",
+        help="run tests with specific loop: pyloop, uvloop or all",
+    )
+    parser.addoption(
+        "--aiohttp-enable-loop-debug",
+        action="store_true",
+        default=False,
+        help="enable event loop debug mode",
+    )
+
+
+def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
+    """Set up pytest fixture.
+
+    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
+    """
+    func = fixturedef.func
+
+    if isasyncgenfunction(func):
+        # async generator fixture
+        is_async_gen = True
+    elif asyncio.iscoroutinefunction(func):
+        # regular async fixture
+        is_async_gen = False
+    else:
+        # not an async fixture, nothing to do
+        return
+
+    strip_request = False
+    if "request" not in fixturedef.argnames:
+        fixturedef.argnames += ("request",)
+        strip_request = True
+
+    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
+        request = kwargs["request"]
+        if strip_request:
+            del kwargs["request"]
+
+        # if neither the fixture nor the test use the 'loop' fixture,
+        # 'getfixturevalue' will fail because the test is not parameterized
+        # (this can be removed someday if 'loop' is no longer parameterized)
+        if "loop" not in request.fixturenames:
+            raise Exception(
+                "Asynchronous fixtures must depend on the 'loop' fixture or "
+                "be used in tests depending on it."
+            )
+
+        _loop = request.getfixturevalue("loop")
+
+        if is_async_gen:
+            # for async generators, we need to advance the generator once,
+            # then advance it again in a finalizer
+            gen = func(*args, **kwargs)
+
+            def finalizer():  # type: ignore[no-untyped-def]
+                try:
+                    return _loop.run_until_complete(gen.__anext__())
+                except StopAsyncIteration:
+                    pass
+
+            request.addfinalizer(finalizer)
+            return _loop.run_until_complete(gen.__anext__())
+        else:
+            return _loop.run_until_complete(func(*args, **kwargs))
+
+    fixturedef.func = wrapper
+
+
+@pytest.fixture
+def fast(request):  # type: ignore[no-untyped-def]
+    """--fast config option"""
+    return request.config.getoption("--aiohttp-fast")
+
+
+@pytest.fixture
+def loop_debug(request):  # type: ignore[no-untyped-def]
+    """--enable-loop-debug config option"""
+    return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context():  # type: ignore[no-untyped-def]
+    """Context manager which checks for RuntimeWarnings.
+
+    This exists specifically to
+    avoid "coroutine 'X' was never awaited" warnings being missed.
+
+    If RuntimeWarnings occur in the context a RuntimeError is raised.
+    """
+    with warnings.catch_warnings(record=True) as _warnings:
+        yield
+        rw = [
+            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+            for w in _warnings
+            if w.category == RuntimeWarning
+        ]
+        if rw:
+            raise RuntimeError(
+                "{} Runtime Warning{},\n{}".format(
+                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+                )
+            )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
+    """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
+    argument when it's passed straight through.
+    """
+    if loop:
+        # loop already exists, pass it straight through
+        yield loop
+    else:
+        # this shadows loop_context's standard behavior
+        loop = setup_test_loop()
+        yield loop
+        teardown_test_loop(loop, fast=fast)
+
+
+def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
+    """Fix pytest collecting for coroutines."""
+    if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
+        return list(collector._genfunctions(name, obj))
+
+
+def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
+    """Run coroutines in an event loop instead of a normal function call."""
+    fast = pyfuncitem.config.getoption("--aiohttp-fast")
+    if asyncio.iscoroutinefunction(pyfuncitem.function):
+        existing_loop = pyfuncitem.funcargs.get(
+            "proactor_loop"
+        ) or pyfuncitem.funcargs.get("loop", None)
+        with _runtime_warning_context():
+            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
+                testargs = {
+                    arg: pyfuncitem.funcargs[arg]
+                    for arg in pyfuncitem._fixtureinfo.argnames
+                }
+                _loop.run_until_complete(pyfuncitem.obj(**testargs))
+
+        return True
+
+
+def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
+    if "loop_factory" not in metafunc.fixturenames:
+        return
+
+    loops = metafunc.config.option.aiohttp_loop
+    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
+    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
+
+    if uvloop is not None:  # pragma: no cover
+        avail_factories["uvloop"] = uvloop.EventLoopPolicy
+
+    if loops == "all":
+        loops = "pyloop,uvloop?"
+
+    factories = {}  # type: ignore[var-annotated]
+    for name in loops.split(","):
+        required = not name.endswith("?")
+        name = name.strip(" ?")
+        if name not in avail_factories:  # pragma: no cover
+            if required:
+                raise ValueError(
+                    "Unknown loop '%s', available loops: %s"
+                    % (name, list(factories.keys()))
+                )
+            else:
+                continue
+        factories[name] = avail_factories[name]
+    metafunc.parametrize(
+        "loop_factory", list(factories.values()), ids=list(factories.keys())
+    )
+
+
+@pytest.fixture
+def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
+    """Return an instance of the event loop."""
+    policy = loop_factory()
+    asyncio.set_event_loop_policy(policy)
+    with loop_context(fast=fast) as _loop:
+        if loop_debug:
+            _loop.set_debug(True)  # pragma: no cover
+        asyncio.set_event_loop(_loop)
+        yield _loop
+
+
+@pytest.fixture
+def proactor_loop():  # type: ignore[no-untyped-def]
+    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
+    asyncio.set_event_loop_policy(policy)
+
+    with loop_context(policy.new_event_loop) as _loop:
+        asyncio.set_event_loop(_loop)
+        yield _loop
+
+
+@pytest.fixture
+def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
+    warnings.warn(
+        "Deprecated, use aiohttp_unused_port fixture instead",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return aiohttp_unused_port
+
+
+@pytest.fixture
+def aiohttp_unused_port() -> Callable[[], int]:
+    """Return a port that is unused on the current host."""
+    return _unused_port
+
+
+@pytest.fixture
+def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
+    """Factory to create a TestServer instance, given an app.
+
+    aiohttp_server(app, **kwargs)
+    """
+    servers = []
+
+    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
+        server = TestServer(app, port=port)
+        await server.start_server(loop=loop, **kwargs)
+        servers.append(server)
+        return server
+
+    yield go
+
+    async def finalize() -> None:
+        while servers:
+            await servers.pop().close()
+
+    loop.run_until_complete(finalize())
+
+
+@pytest.fixture
+def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
+    warnings.warn(
+        "Deprecated, use aiohttp_server fixture instead",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return aiohttp_server
+
+
+@pytest.fixture
+def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
+    """Factory to create a RawTestServer instance, given a web handler.
+
+    aiohttp_raw_server(handler, **kwargs)
+    """
+    servers = []
+
+    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
+        server = RawTestServer(handler, port=port)
+        await server.start_server(loop=loop, **kwargs)
+        servers.append(server)
+        return server
+
+    yield go
+
+    async def finalize() -> None:
+        while servers:
+            await servers.pop().close()
+
+    loop.run_until_complete(finalize())
+
+
+@pytest.fixture
+def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
+    aiohttp_raw_server,
+):
+    warnings.warn(
+        "Deprecated, use aiohttp_raw_server fixture instead",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return aiohttp_raw_server
+
+
+@pytest.fixture
+def aiohttp_client(
+    loop: asyncio.AbstractEventLoop,
+) -> Iterator[AiohttpClient]:
+    """Factory to create a TestClient instance.
+
+    aiohttp_client(app, **kwargs)
+    aiohttp_client(server, **kwargs)
+    aiohttp_client(raw_server, **kwargs)
+    """
+    clients = []
+
+    async def go(
+        __param: Union[Application, BaseTestServer],
+        *args: Any,
+        server_kwargs: Optional[Dict[str, Any]] = None,
+        **kwargs: Any
+    ) -> TestClient:
+
+        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
+            __param, (Application, BaseTestServer)
+        ):
+            __param = __param(loop, *args, **kwargs)
+            kwargs = {}
+        else:
+            assert not args, "args should be empty"
+
+        if isinstance(__param, Application):
+            server_kwargs = server_kwargs or {}
+            server = TestServer(__param, loop=loop, **server_kwargs)
+            client = TestClient(server, loop=loop, **kwargs)
+        elif isinstance(__param, BaseTestServer):
+            client = TestClient(__param, loop=loop, **kwargs)
+        else:
+            raise ValueError("Unknown argument type: %r" % type(__param))
+
+        await client.start_server()
+        clients.append(client)
+        return client
+
+    yield go
+
+    async def finalize() -> None:
+        while clients:
+
await clients.pop().close()
|
370 |
+
|
371 |
+
loop.run_until_complete(finalize())
|
372 |
+
|
373 |
+
|
374 |
+
@pytest.fixture
|
375 |
+
def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
|
376 |
+
warnings.warn(
|
377 |
+
"Deprecated, use aiohttp_client fixture instead",
|
378 |
+
DeprecationWarning,
|
379 |
+
stacklevel=2,
|
380 |
+
)
|
381 |
+
return aiohttp_client
|
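For context, a minimal sketch of how these fixtures are typically consumed in a test module (not part of the file above; the app, route, and handler names are illustrative assumptions):

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    # Hypothetical handler used only to exercise the fixture.
    return web.Response(text="hello")


async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    # The aiohttp_client factory starts a TestServer and returns a TestClient;
    # both are closed by the fixture's finalize() loop after the test.
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "hello"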
venv/lib/python3.10/site-packages/aiohttp/tcp_helpers.py
ADDED
@@ -0,0 +1,37 @@
"""Helper methods to tune a TCP connection"""

import asyncio
import socket
from contextlib import suppress
from typing import Optional  # noqa

__all__ = ("tcp_keepalive", "tcp_nodelay")


if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        sock = transport.get_extra_info("socket")
        if sock is not None:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        pass


def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    sock = transport.get_extra_info("socket")

    if sock is None:
        return

    if sock.family not in (socket.AF_INET, socket.AF_INET6):
        return

    value = bool(value)

    # the socket may be closed already; on Windows OSError is raised
    with suppress(OSError):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
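A minimal sketch of where these helpers fit, assuming a custom asyncio protocol (EchoProtocol is a hypothetical example, and the helpers live in a private aiohttp module, so the import path may change between releases):

import asyncio
from typing import cast

from aiohttp.tcp_helpers import tcp_keepalive, tcp_nodelay


class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tcp = cast(asyncio.Transport, transport)
        # Enable SO_KEEPALIVE (a no-op where the platform lacks it) and
        # disable Nagle's algorithm for latency-sensitive small writes.
        tcp_keepalive(tcp)
        tcp_nodelay(tcp, True)
        self.transport = tcp

    def data_received(self, data: bytes) -> None:
        # Echo the bytes straight back on the tuned transport.
        self.transport.write(data)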
venv/lib/python3.10/site-packages/aiohttp/web_protocol.py
ADDED
@@ -0,0 +1,698 @@
import asyncio
import asyncio.streams
import traceback
import warnings
from collections import deque
from contextlib import suppress
from html import escape as html_escape
from http import HTTPStatus
from logging import Logger
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Deque,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)

import attr
import yarl

from .abc import AbstractAccessLogger, AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import ceil_timeout, set_exception
from .http import (
    HttpProcessingError,
    HttpRequestParser,
    HttpVersion10,
    RawRequestMessage,
    StreamWriter,
)
from .log import access_logger, server_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .tcp_helpers import tcp_keepalive
from .web_exceptions import HTTPException
from .web_log import AccessLogger
from .web_request import BaseRequest
from .web_response import Response, StreamResponse

__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")

if TYPE_CHECKING:
    from .web_server import Server


_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)


class RequestPayloadError(Exception):
    """Payload parsing error."""


class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    status: int
    exc: BaseException
    message: str


_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]


class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles an incoming HTTP request. It reads the request
    line, request headers and request payload and calls the handle_request()
    method. By default it always returns a 404 response.

    RequestHandler handles errors in the incoming request, like a bad
    status line, bad headers or an incomplete payload. If any error occurs,
    the connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    timeout_ceil_threshold -- Optional value to specify
                              threshold to ceil() timeout
                              values

    """

    KEEPALIVE_RESCHEDULE_DELAY = 1

    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_keepalive_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
        "_timeout_ceil_threshold",
    )

    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        keepalive_timeout: float = 75.0,  # NGINX default is 75 secs
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
        timeout_ceil_threshold: float = 5,
    ):
        super().__init__(loop)

        self._request_count = 0
        self._keepalive = False
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._keepalive_time = 0.0
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        self._waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )

        self._timeout_ceil_threshold: float = 5
        try:
            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
        except (TypeError, ValueError):
            pass

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )
        else:
            self.access_logger = None

        self._close = False
        self._force_close = False

    def __repr__(self) -> str:
        return "<{} {}>".format(
            self.__class__.__name__,
            "connected" if self.transport is not None else "disconnected",
        )

    @property
    def keepalive_timeout(self) -> float:
        return self._keepalive_timeout

    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._waiter:
            self._waiter.cancel()

        # wait for handlers
        with suppress(asyncio.CancelledError, asyncio.TimeoutError):
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    await self._task_handler

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        super().connection_made(transport)

        real_transport = cast(asyncio.Transport, transport)
        if self._tcp_keepalive:
            tcp_keepalive(real_transport)

        self._task_handler = self._loop.create_task(self.start())
        assert self._manager is not None
        self._manager.connection_made(self, real_transport)

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        if self._manager is None:
            return
        self._manager.connection_lost(self, exc)

        super().connection_lost(exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self._manager = None
        self._force_close = True
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if self._waiter is not None:
            self._waiter.cancel()

        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None

    def set_parser(self, parser: Any) -> None:
        # Actual type is WebReader
        assert self._payload_parser is None

        self._payload_parser = parser

        if self._message_tail:
            self._payload_parser.feed_data(self._message_tail)
            self._message_tail = b""

    def eof_received(self) -> None:
        pass

    def data_received(self, data: bytes) -> None:
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()

    def keep_alive(self, val: bool) -> None:
        """Set keep-alive connection mode.

        :param bool val: new state.
        """
        self._keepalive = val
        if self._keepalive_handle:
            self._keepalive_handle.cancel()
            self._keepalive_handle = None

    def close(self) -> None:
        """Close connection.

        Stop accepting new pipelining messages and close
        connection when handlers done processing messages.
        """
        self._close = True
        if self._waiter:
            self._waiter.cancel()

    def force_close(self) -> None:
        """Forcefully close connection."""
        self._force_close = True
        if self._waiter:
            self._waiter.cancel()
        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> None:
        if self.access_logger is not None:
            self.access_logger.log(request, response, self._loop.time() - time)

    def log_debug(self, *args: Any, **kw: Any) -> None:
        if self.debug:
            self.logger.debug(*args, **kw)

    def log_exception(self, *args: Any, **kw: Any) -> None:
        self.logger.exception(*args, **kw)

    def _process_keepalive(self) -> None:
        if self._force_close or not self._keepalive:
            return

        next = self._keepalive_time + self._keepalive_timeout

        # handler in idle state
        if self._waiter:
            if self._loop.time() > next:
                self.force_close()
                return

        # not all request handlers are done,
        # reschedule itself to next second
        self._keepalive_handle = self._loop.call_later(
            self.KEEPALIVE_RESCHEDULE_DELAY,
            self._process_keepalive,
        )

    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        assert self._request_handler is not None
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = exc
            reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            reset = await self.finish_response(request, resp, start_time)

        return resp, reset

    async def start(self) -> None:
        """Process incoming request.

        It reads the request line, request headers and request payload, then
        calls the handle_request() method. A subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. The connection is always closed unless
        keep_alive(True) is specified.
        """
        loop = self._loop
        handler = self._task_handler
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                except asyncio.CancelledError:
                    break
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                task = self._loop.create_task(
                    self._handle_request(request, start, request_handler)
                )
                try:
                    resp, reset = await task
                except (asyncio.CancelledError, ConnectionError):
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        with suppress(asyncio.TimeoutError, asyncio.CancelledError):
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                set_exception(payload, PayloadAccessError())

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                break
            except RuntimeError as exc:
                if self.debug:
                    self.log_exception("Unhandled runtime exception", exc_info=exc)
                self.force_close()
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = self._loop.time()
                            self._keepalive_time = now
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    now + keepalive_timeout, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()

    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> bool:
        """Prepare the response and write_eof, then log access.

        This has to be called within the context of any exception so the
        access logger can get exception information. Returns True if the
        client disconnects prematurely.
        """
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            if resp is None:
                raise RuntimeError("Missing return statement on request handler")
            else:
                raise RuntimeError(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return True
        else:
            self.log_access(request, resp, start_time)
            return False

    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp

    def _make_error_handler(
        self, err_info: _ErrInfo
    ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
        async def handler(request: BaseRequest) -> StreamResponse:
            return self.handle_error(
                request, err_info.status, err_info.exc, err_info.message
            )

        return handler
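RequestHandler instances are not created directly; the Server class in web_server.py (the next file) acts as the protocol factory via Server.__call__. A minimal sketch of wiring that into asyncio using aiohttp's public low-level API (host, port, and handler below are illustrative):

import asyncio

from aiohttp import web


async def handler(request: web.BaseRequest) -> web.Response:
    return web.Response(text="OK")


async def main() -> None:
    # web.Server is callable and each call builds a RequestHandler protocol,
    # so the instance itself can serve as the protocol factory.
    server = web.Server(handler)
    loop = asyncio.get_running_loop()
    await loop.create_server(server, "127.0.0.1", 8080)
    await asyncio.sleep(3600)  # serve for a while, then exit


asyncio.run(main())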
venv/lib/python3.10/site-packages/aiohttp/web_server.py
ADDED
@@ -0,0 +1,77 @@
"""Low level HTTP server."""
import asyncio
from typing import Any, Awaitable, Callable, Dict, List, Optional  # noqa

from .abc import AbstractStreamWriter
from .helpers import get_running_loop
from .http_parser import RawRequestMessage
from .streams import StreamReader
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
from .web_request import BaseRequest

__all__ = ("Server",)


class Server:
    def __init__(
        self,
        handler: _RequestHandler,
        *,
        request_factory: Optional[_RequestFactory] = None,
        handler_cancellation: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any
    ) -> None:
        self._loop = get_running_loop(loop)
        self._connections: Dict[RequestHandler, asyncio.Transport] = {}
        self._kwargs = kwargs
        self.requests_count = 0
        self.request_handler = handler
        self.request_factory = request_factory or self._make_request
        self.handler_cancellation = handler_cancellation

    @property
    def connections(self) -> List[RequestHandler]:
        return list(self._connections.keys())

    def connection_made(
        self, handler: RequestHandler, transport: asyncio.Transport
    ) -> None:
        self._connections[handler] = transport

    def connection_lost(
        self, handler: RequestHandler, exc: Optional[BaseException] = None
    ) -> None:
        if handler in self._connections:
            del self._connections[handler]

    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
    ) -> BaseRequest:
        return BaseRequest(message, payload, protocol, writer, task, self._loop)

    def pre_shutdown(self) -> None:
        for conn in self._connections:
            conn.close()

    async def shutdown(self, timeout: Optional[float] = None) -> None:
        coros = (conn.shutdown(timeout) for conn in self._connections)
        await asyncio.gather(*coros)
        self._connections.clear()

    def __call__(self) -> RequestHandler:
        try:
            return RequestHandler(self, loop=self._loop, **self._kwargs)
        except TypeError:
            # Failsafe creation: remove all custom handler_args
            kwargs = {
                k: v
                for k, v in self._kwargs.items()
                if k in ["debug", "access_log_class"]
            }
            return RequestHandler(self, loop=self._loop, **kwargs)
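A sketch of the same low-level server driven through aiohttp's runner machinery instead of a raw create_server call (host and port are arbitrary choices for illustration):

import asyncio

from aiohttp import web


async def handler(request: web.BaseRequest) -> web.Response:
    return web.Response(text="hello from the low-level server")


async def main() -> None:
    server = web.Server(handler)
    # ServerRunner/TCPSite manage the listening socket and graceful shutdown,
    # which internally uses Server.shutdown() shown above.
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()
    try:
        await asyncio.sleep(3600)
    finally:
        await runner.cleanup()


asyncio.run(main())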
venv/lib/python3.10/site-packages/pydantic/_internal/__init__.py
ADDED
File without changes

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (186 Bytes)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_config.cpython-310.pyc
ADDED
Binary file (11.5 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-310.pyc
ADDED
Binary file (3.29 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-310.pyc
ADDED
Binary file (18.6 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_dataclasses.cpython-310.pyc
ADDED
Binary file (7.96 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-310.pyc
ADDED
Binary file (26.5 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_decorators_v1.cpython-310.pyc
ADDED
Binary file (7.34 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-310.pyc
ADDED
Binary file (15.4 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-310.pyc
ADDED
Binary file (3.42 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_fields.cpython-310.pyc
ADDED
Binary file (9.54 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-310.pyc
ADDED
Binary file (1.18 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-310.pyc
ADDED
Binary file (68.2 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_generics.cpython-310.pyc
ADDED
Binary file (17.7 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_git.cpython-310.pyc
ADDED
Binary file (1.19 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-310.pyc
ADDED
Binary file (399 Bytes)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-310.pyc
ADDED
Binary file (10.8 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-310.pyc
ADDED
Binary file (4.59 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-310.pyc
ADDED
Binary file (22.8 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_repr.cpython-310.pyc
ADDED
Binary file (4.95 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-310.pyc
ADDED
Binary file (5.15 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_signature.cpython-310.pyc
ADDED
Binary file (4.71 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_std_types_schema.cpython-310.pyc
ADDED
Binary file (19.5 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-310.pyc
ADDED
Binary file (15.3 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_utils.cpython-310.pyc
ADDED
Binary file (12.3 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-310.pyc
ADDED
Binary file (2.58 kB)

venv/lib/python3.10/site-packages/pydantic/_internal/__pycache__/_validators.cpython-310.pyc
ADDED
Binary file (8.49 kB)
venv/lib/python3.10/site-packages/pydantic/_internal/_config.py
ADDED
@@ -0,0 +1,334 @@
from __future__ import annotations as _annotations

import warnings
from contextlib import contextmanager
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    cast,
)

from pydantic_core import core_schema
from typing_extensions import (
    Literal,
    Self,
)

from ..aliases import AliasGenerator
from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable
from ..errors import PydanticUserError
from ..warnings import PydanticDeprecatedSince20

if not TYPE_CHECKING:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    DeprecationWarning = PydanticDeprecatedSince20

if TYPE_CHECKING:
    from .._internal._schema_generation_shared import GenerateSchema

DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.'


class ConfigWrapper:
    """Internal wrapper for Config which exposes ConfigDict items as attributes."""

    __slots__ = ('config_dict',)

    config_dict: ConfigDict

    # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they
    # stop matching
    title: str | None
    str_to_lower: bool
    str_to_upper: bool
    str_strip_whitespace: bool
    str_min_length: int
    str_max_length: int | None
    extra: ExtraValues | None
    frozen: bool
    populate_by_name: bool
    use_enum_values: bool
    validate_assignment: bool
    arbitrary_types_allowed: bool
    from_attributes: bool
    # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names
    # to construct error `loc`s, default `True`
    loc_by_alias: bool
    alias_generator: Callable[[str], str] | AliasGenerator | None
    ignored_types: tuple[type, ...]
    allow_inf_nan: bool
    json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
    json_encoders: dict[type[object], JsonEncoder] | None

    # new in V2
    strict: bool
    # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
    revalidate_instances: Literal['always', 'never', 'subclass-instances']
    ser_json_timedelta: Literal['iso8601', 'float']
    ser_json_bytes: Literal['utf8', 'base64']
    ser_json_inf_nan: Literal['null', 'constants']
    # whether to validate default values during validation, default False
    validate_default: bool
    validate_return: bool
    protected_namespaces: tuple[str, ...]
    hide_input_in_errors: bool
    defer_build: bool
    plugin_settings: dict[str, object] | None
    schema_generator: type[GenerateSchema] | None
    json_schema_serialization_defaults_required: bool
    json_schema_mode_override: Literal['validation', 'serialization', None]
    coerce_numbers_to_str: bool
    regex_engine: Literal['rust-regex', 'python-re']
    validation_error_cause: bool
    use_attribute_docstrings: bool
    cache_strings: bool | Literal['all', 'keys', 'none']

    def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True):
        if check:
            self.config_dict = prepare_config(config)
        else:
            self.config_dict = cast(ConfigDict, config)

    @classmethod
    def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self:
        """Build a new `ConfigWrapper` instance for a `BaseModel`.

        The config wrapper built based on (in descending order of priority):
        - options from `kwargs`
        - options from the `namespace`
        - options from the base classes (`bases`)

        Args:
            bases: A tuple of base classes.
            namespace: The namespace of the class being created.
            kwargs: The kwargs passed to the class being created.

        Returns:
            A `ConfigWrapper` instance for `BaseModel`.
        """
        config_new = ConfigDict()
        for base in bases:
            config = getattr(base, 'model_config', None)
            if config:
                config_new.update(config.copy())

        config_class_from_namespace = namespace.get('Config')
        config_dict_from_namespace = namespace.get('model_config')

        raw_annotations = namespace.get('__annotations__', {})
        if raw_annotations.get('model_config') and not config_dict_from_namespace:
            raise PydanticUserError(
                '`model_config` cannot be used as a model field name. Use `model_config` for model configuration.',
                code='model-config-invalid-field-name',
            )

        if config_class_from_namespace and config_dict_from_namespace:
            raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both')

        config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace)

        config_new.update(config_from_namespace)

        for k in list(kwargs.keys()):
            if k in config_keys:
                config_new[k] = kwargs.pop(k)

        return cls(config_new)

    # we don't show `__getattr__` to type checkers so missing attributes cause errors
    if not TYPE_CHECKING:  # pragma: no branch

        def __getattr__(self, name: str) -> Any:
            try:
                return self.config_dict[name]
            except KeyError:
                try:
                    return config_defaults[name]
                except KeyError:
                    raise AttributeError(f'Config has no attribute {name!r}') from None

    def core_config(self, obj: Any) -> core_schema.CoreConfig:
        """Create a pydantic-core config, `obj` is just used to populate `title` if not set in config.

        Pass `obj=None` if you do not want to attempt to infer the `title`.

        We don't use getattr here since we don't want to populate with defaults.

        Args:
            obj: An object used to populate `title` if not set in config.

        Returns:
            A `CoreConfig` object created from config.
        """

        def dict_not_none(**kwargs: Any) -> Any:
            return {k: v for k, v in kwargs.items() if v is not None}

        core_config = core_schema.CoreConfig(
            **dict_not_none(
                title=self.config_dict.get('title') or (obj and obj.__name__),
                extra_fields_behavior=self.config_dict.get('extra'),
                allow_inf_nan=self.config_dict.get('allow_inf_nan'),
                populate_by_name=self.config_dict.get('populate_by_name'),
                str_strip_whitespace=self.config_dict.get('str_strip_whitespace'),
                str_to_lower=self.config_dict.get('str_to_lower'),
                str_to_upper=self.config_dict.get('str_to_upper'),
                strict=self.config_dict.get('strict'),
                ser_json_timedelta=self.config_dict.get('ser_json_timedelta'),
                ser_json_bytes=self.config_dict.get('ser_json_bytes'),
                ser_json_inf_nan=self.config_dict.get('ser_json_inf_nan'),
                from_attributes=self.config_dict.get('from_attributes'),
                loc_by_alias=self.config_dict.get('loc_by_alias'),
                revalidate_instances=self.config_dict.get('revalidate_instances'),
                validate_default=self.config_dict.get('validate_default'),
                str_max_length=self.config_dict.get('str_max_length'),
                str_min_length=self.config_dict.get('str_min_length'),
                hide_input_in_errors=self.config_dict.get('hide_input_in_errors'),
                coerce_numbers_to_str=self.config_dict.get('coerce_numbers_to_str'),
                regex_engine=self.config_dict.get('regex_engine'),
                validation_error_cause=self.config_dict.get('validation_error_cause'),
                cache_strings=self.config_dict.get('cache_strings'),
            )
        )
        return core_config

    def __repr__(self):
        c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items())
        return f'ConfigWrapper({c})'


class ConfigWrapperStack:
    """A stack of `ConfigWrapper` instances."""

    def __init__(self, config_wrapper: ConfigWrapper):
        self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper]

    @property
    def tail(self) -> ConfigWrapper:
        return self._config_wrapper_stack[-1]

    @contextmanager
    def push(self, config_wrapper: ConfigWrapper | ConfigDict | None):
        if config_wrapper is None:
            yield
            return

        if not isinstance(config_wrapper, ConfigWrapper):
            config_wrapper = ConfigWrapper(config_wrapper, check=False)

        self._config_wrapper_stack.append(config_wrapper)
        try:
            yield
        finally:
            self._config_wrapper_stack.pop()


config_defaults = ConfigDict(
    title=None,
    str_to_lower=False,
    str_to_upper=False,
    str_strip_whitespace=False,
    str_min_length=0,
    str_max_length=None,
    # let the model / dataclass decide how to handle it
    extra=None,
    frozen=False,
    populate_by_name=False,
    use_enum_values=False,
    validate_assignment=False,
    arbitrary_types_allowed=False,
    from_attributes=False,
    loc_by_alias=True,
    alias_generator=None,
    ignored_types=(),
    allow_inf_nan=True,
    json_schema_extra=None,
    strict=False,
    revalidate_instances='never',
    ser_json_timedelta='iso8601',
    ser_json_bytes='utf8',
    ser_json_inf_nan='null',
    validate_default=False,
    validate_return=False,
    protected_namespaces=('model_',),
    hide_input_in_errors=False,
    json_encoders=None,
    defer_build=False,
    plugin_settings=None,
    schema_generator=None,
    json_schema_serialization_defaults_required=False,
    json_schema_mode_override=None,
    coerce_numbers_to_str=False,
    regex_engine='rust-regex',
    validation_error_cause=False,
    use_attribute_docstrings=False,
    cache_strings=True,
)


def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
    """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None.

    Args:
        config: The input config.

    Returns:
        A ConfigDict object created from config.
    """
    if config is None:
        return ConfigDict()

    if not isinstance(config, dict):
        warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
        config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}

    config_dict = cast(ConfigDict, config)
    check_deprecated(config_dict)
    return config_dict


config_keys = set(ConfigDict.__annotations__.keys())


V2_REMOVED_KEYS = {
    'allow_mutation',
    'error_msg_templates',
    'fields',
    'getter_dict',
    'smart_union',
    'underscore_attrs_are_private',
    'json_loads',
    'json_dumps',
    'copy_on_model_validation',
    'post_init_call',
}
V2_RENAMED_KEYS = {
    'allow_population_by_field_name': 'populate_by_name',
    'anystr_lower': 'str_to_lower',
    'anystr_strip_whitespace': 'str_strip_whitespace',
    'anystr_upper': 'str_to_upper',
    'keep_untouched': 'ignored_types',
    'max_anystr_length': 'str_max_length',
    'min_anystr_length': 'str_min_length',
    'orm_mode': 'from_attributes',
    'schema_extra': 'json_schema_extra',
    'validate_all': 'validate_default',
}


def check_deprecated(config_dict: ConfigDict) -> None:
    """Check for deprecated config keys and warn the user.

    Args:
        config_dict: The input config.
    """
    deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys()
    deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys()
    if deprecated_removed_keys or deprecated_renamed_keys:
        renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)}
        renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()]
        removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)]
        message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets)
        warnings.warn(message, UserWarning)
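ConfigWrapper normalizes whatever callers hand to pydantic; from the public side that is usually a ConfigDict, as in this sketch (model and field names are illustrative, not from the file above):

from pydantic import BaseModel, ConfigDict


class User(BaseModel):
    # Keys here are checked against config_keys; deprecated V1 names such as
    # anystr_strip_whitespace would be flagged by check_deprecated() above.
    model_config = ConfigDict(str_strip_whitespace=True, frozen=True)

    name: str


u = User(name="  alice  ")
print(u.name)  # "alice" -- whitespace stripped per str_strip_whitespace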
venv/lib/python3.10/site-packages/pydantic/_internal/_core_metadata.py
ADDED
@@ -0,0 +1,92 @@
from __future__ import annotations as _annotations

import typing
from typing import Any

import typing_extensions

if typing.TYPE_CHECKING:
    from ._schema_generation_shared import (
        CoreSchemaOrField as CoreSchemaOrField,
    )
    from ._schema_generation_shared import (
        GetJsonSchemaFunction,
    )


class CoreMetadata(typing_extensions.TypedDict, total=False):
    """A `TypedDict` for holding the metadata dict of the schema.

    Attributes:
        pydantic_js_functions: List of JSON schema functions.
        pydantic_js_prefer_positional_arguments: Whether JSON schema generator will
            prefer positional over keyword arguments for an 'arguments' schema.
    """

    pydantic_js_functions: list[GetJsonSchemaFunction]
    pydantic_js_annotation_functions: list[GetJsonSchemaFunction]

    # If `pydantic_js_prefer_positional_arguments` is True, the JSON schema generator will
    # prefer positional over keyword arguments for an 'arguments' schema.
    pydantic_js_prefer_positional_arguments: bool | None

    pydantic_typed_dict_cls: type[Any] | None  # TODO: Consider moving this into the pydantic-core TypedDictSchema


class CoreMetadataHandler:
    """Because the metadata field in pydantic_core is of type `Any`, we can't assume much about its contents.

    This class is used to interact with the metadata field on a CoreSchema object in a consistent
    way throughout pydantic.
    """

    __slots__ = ('_schema',)

    def __init__(self, schema: CoreSchemaOrField):
        self._schema = schema

        metadata = schema.get('metadata')
        if metadata is None:
            schema['metadata'] = CoreMetadata()
        elif not isinstance(metadata, dict):
            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')

    @property
    def metadata(self) -> CoreMetadata:
        """Retrieves the metadata dict from the schema, initializing it to a dict if it is None
        and raises an error if it is not a dict.
        """
        metadata = self._schema.get('metadata')
        if metadata is None:
            self._schema['metadata'] = metadata = CoreMetadata()
        if not isinstance(metadata, dict):
            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
        return metadata


def build_metadata_dict(
    *,  # force keyword arguments to make it easier to modify this signature in a backwards-compatible way
    js_functions: list[GetJsonSchemaFunction] | None = None,
    js_annotation_functions: list[GetJsonSchemaFunction] | None = None,
    js_prefer_positional_arguments: bool | None = None,
    typed_dict_cls: type[Any] | None = None,
    initial_metadata: Any | None = None,
) -> Any:
    """Builds a dict to use as the metadata field of a CoreSchema object in a manner that is consistent
    with the CoreMetadataHandler class.
    """
    if initial_metadata is not None and not isinstance(initial_metadata, dict):
        raise TypeError(f'CoreSchema metadata should be a dict; got {initial_metadata!r}.')

    metadata = CoreMetadata(
        pydantic_js_functions=js_functions or [],
        pydantic_js_annotation_functions=js_annotation_functions or [],
        pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments,
        pydantic_typed_dict_cls=typed_dict_cls,
    )
    metadata = {k: v for k, v in metadata.items() if v is not None}

    if initial_metadata is not None:
        metadata = {**initial_metadata, **metadata}

    return metadata
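A short usage sketch for the pair above, assuming this snapshot is importable as `pydantic._internal._core_metadata` (a private module, so the API may shift between releases): `build_metadata_dict` drops `None`-valued keys before the dict is attached to a schema, and `CoreMetadataHandler` reads the metadata back, initializing it if absent.

from pydantic_core import core_schema

from pydantic._internal._core_metadata import CoreMetadataHandler, build_metadata_dict

# js_prefer_positional_arguments and typed_dict_cls default to None and are dropped.
metadata = build_metadata_dict(js_functions=[])
schema = core_schema.int_schema(metadata=metadata)

handler = CoreMetadataHandler(schema)
print(handler.metadata)
# {'pydantic_js_functions': [], 'pydantic_js_annotation_functions': []}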
venv/lib/python3.10/site-packages/pydantic/_internal/_core_utils.py
ADDED
@@ -0,0 +1,568 @@
from __future__ import annotations

import os
from collections import defaultdict
from typing import (
    Any,
    Callable,
    Hashable,
    TypeVar,
    Union,
)

from pydantic_core import CoreSchema, core_schema
from pydantic_core import validate_core_schema as _validate_core_schema
from typing_extensions import TypeAliasType, TypeGuard, get_args, get_origin

from . import _repr
from ._typing_extra import is_generic_alias

AnyFunctionSchema = Union[
    core_schema.AfterValidatorFunctionSchema,
    core_schema.BeforeValidatorFunctionSchema,
    core_schema.WrapValidatorFunctionSchema,
    core_schema.PlainValidatorFunctionSchema,
]


FunctionSchemaWithInnerSchema = Union[
    core_schema.AfterValidatorFunctionSchema,
    core_schema.BeforeValidatorFunctionSchema,
    core_schema.WrapValidatorFunctionSchema,
]

CoreSchemaField = Union[
    core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField
]
CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField]

_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'}
_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'}
_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'}

TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag'
"""
Used in a `Tag` schema to specify the tag used for a discriminated union.
"""
HAS_INVALID_SCHEMAS_METADATA_KEY = 'pydantic.internal.invalid'
"""Used to mark a schema that is invalid because it refers to a definition that was not yet defined when the
schema was first encountered.
"""


def is_core_schema(
    schema: CoreSchemaOrField,
) -> TypeGuard[CoreSchema]:
    return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES


def is_core_schema_field(
    schema: CoreSchemaOrField,
) -> TypeGuard[CoreSchemaField]:
    return schema['type'] in _CORE_SCHEMA_FIELD_TYPES


def is_function_with_inner_schema(
    schema: CoreSchemaOrField,
) -> TypeGuard[FunctionSchemaWithInnerSchema]:
    return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES


def is_list_like_schema_with_items_schema(
    schema: CoreSchema,
) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]:
    return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES


def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str:
    """Produces the ref to be used for this type by pydantic_core's core schemas.

    This `args_override` argument was added for the purpose of creating valid recursive references
    when creating generic models without needing to create a concrete class.
    """
    origin = get_origin(type_) or type_

    args = get_args(type_) if is_generic_alias(type_) else (args_override or ())
    generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None)
    if generic_metadata:
        origin = generic_metadata['origin'] or origin
        args = generic_metadata['args'] or args

    module_name = getattr(origin, '__module__', '<No __module__>')
    if isinstance(origin, TypeAliasType):
        type_ref = f'{module_name}.{origin.__name__}:{id(origin)}'
    else:
        try:
            qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>')
        except Exception:
            qualname = getattr(origin, '__qualname__', '<No __qualname__>')
        type_ref = f'{module_name}.{qualname}:{id(origin)}'

    arg_refs: list[str] = []
    for arg in args:
        if isinstance(arg, str):
            # Handle string literals as a special case; we may be able to remove this special handling if we
            # wrap them in a ForwardRef at some point.
            arg_ref = f'{arg}:str-{id(arg)}'
        else:
            arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}'
        arg_refs.append(arg_ref)
    if arg_refs:
        type_ref = f'{type_ref}[{",".join(arg_refs)}]'
    return type_ref


def get_ref(s: core_schema.CoreSchema) -> None | str:
    """Get the ref from the schema if it has one.
    This exists just for type checking to work correctly.
    """
    return s.get('ref', None)


def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
    defs: dict[str, CoreSchema] = {}

    def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        ref = get_ref(s)
        if ref:
            defs[ref] = s
        return recurse(s, _record_valid_refs)

    walk_core_schema(schema, _record_valid_refs)

    return defs


def define_expected_missing_refs(
    schema: core_schema.CoreSchema, allowed_missing_refs: set[str]
) -> core_schema.CoreSchema | None:
    if not allowed_missing_refs:
        # in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema
        # this is a common case (will be hit for all non-generic models), so it's worth optimizing for
        return None

    refs = collect_definitions(schema).keys()

    expected_missing_refs = allowed_missing_refs.difference(refs)
    if expected_missing_refs:
        definitions: list[core_schema.CoreSchema] = [
            # TODO: Replace this with a (new) CoreSchema that, if present at any level, makes validation fail
            # Issue: https://github.com/pydantic/pydantic-core/issues/619
            core_schema.none_schema(ref=ref, metadata={HAS_INVALID_SCHEMAS_METADATA_KEY: True})
            for ref in expected_missing_refs
        ]
        return core_schema.definitions_schema(schema, definitions)
    return None


def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool:
    invalid = False

    def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        nonlocal invalid
        if 'metadata' in s:
            metadata = s['metadata']
            if HAS_INVALID_SCHEMAS_METADATA_KEY in metadata:
                invalid = metadata[HAS_INVALID_SCHEMAS_METADATA_KEY]
                return s
        return recurse(s, _is_schema_valid)

    walk_core_schema(schema, _is_schema_valid)
    return invalid


T = TypeVar('T')


Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema]
Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema]

# TODO: Should we move _WalkCoreSchema into pydantic_core proper?
# Issue: https://github.com/pydantic/pydantic-core/issues/615


class _WalkCoreSchema:
    def __init__(self):
        self._schema_type_to_method = self._build_schema_type_to_method()

    def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]:
        mapping: dict[core_schema.CoreSchemaType, Recurse] = {}
        key: core_schema.CoreSchemaType
        for key in get_args(core_schema.CoreSchemaType):
            method_name = f"handle_{key.replace('-', '_')}_schema"
            mapping[key] = getattr(self, method_name, self._handle_other_schemas)
        return mapping

    def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        return f(schema, self._walk)

    def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        schema = self._schema_type_to_method[schema['type']](schema.copy(), f)
        ser_schema: core_schema.SerSchema | None = schema.get('serialization')  # type: ignore
        if ser_schema:
            schema['serialization'] = self._handle_ser_schemas(ser_schema, f)
        return schema

    def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
        sub_schema = schema.get('schema', None)
        if sub_schema is not None:
            schema['schema'] = self.walk(sub_schema, f)  # type: ignore
        return schema

    def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema:
        schema: core_schema.CoreSchema | None = ser_schema.get('schema', None)
        if schema is not None:
            ser_schema['schema'] = self.walk(schema, f)  # type: ignore
        return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None)
        if return_schema is not None:
            ser_schema['return_schema'] = self.walk(return_schema, f)  # type: ignore
        return ser_schema

    def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema:
        new_definitions: list[core_schema.CoreSchema] = []
        for definition in schema['definitions']:
            if 'schema_ref' in definition and 'ref' in definition:
                # This indicates a purposely indirect reference
                # We want to keep such references around for implications related to JSON schema, etc.:
                new_definitions.append(definition)
                # However, we still need to walk the referenced definition:
                self.walk(definition, f)
                continue

            updated_definition = self.walk(definition, f)
            if 'ref' in updated_definition:
                # If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions
                # This is most likely to happen due to replacing something with a definition reference, in
                # which case it should certainly not go in the definitions list
                new_definitions.append(updated_definition)
        new_inner_schema = self.walk(schema['schema'], f)

        if not new_definitions and len(schema) == 3:
            # This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema
            return new_inner_schema

        new_schema = schema.copy()
        new_schema['schema'] = new_inner_schema
        new_schema['definitions'] = new_definitions
        return new_schema

    def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema:
        items_schema = schema.get('items_schema')
        if items_schema is not None:
            schema['items_schema'] = self.walk(items_schema, f)
        return schema

    def handle_tuple_schema(self, schema: core_schema.TupleSchema, f: Walk) -> core_schema.CoreSchema:
        schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']]
        return schema

    def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema:
        keys_schema = schema.get('keys_schema')
        if keys_schema is not None:
            schema['keys_schema'] = self.walk(keys_schema, f)
        values_schema = schema.get('values_schema')
        if values_schema:
            schema['values_schema'] = self.walk(values_schema, f)
        return schema

    def handle_function_schema(self, schema: AnyFunctionSchema, f: Walk) -> core_schema.CoreSchema:
        if not is_function_with_inner_schema(schema):
            return schema
        schema['schema'] = self.walk(schema['schema'], f)
        return schema

    def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema:
        new_choices: list[CoreSchema | tuple[CoreSchema, str]] = []
        for v in schema['choices']:
            if isinstance(v, tuple):
                new_choices.append((self.walk(v[0], f), v[1]))
            else:
                new_choices.append(self.walk(v, f))
        schema['choices'] = new_choices
        return schema

    def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema:
        new_choices: dict[Hashable, core_schema.CoreSchema] = {}
        for k, v in schema['choices'].items():
            new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f)
        schema['choices'] = new_choices
        return schema

    def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema:
        schema['steps'] = [self.walk(v, f) for v in schema['steps']]
        return schema

    def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema:
        schema['lax_schema'] = self.walk(schema['lax_schema'], f)
        schema['strict_schema'] = self.walk(schema['strict_schema'], f)
        return schema

    def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema:
        schema['json_schema'] = self.walk(schema['json_schema'], f)
        schema['python_schema'] = self.walk(schema['python_schema'], f)
        return schema

    def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema:
        extras_schema = schema.get('extras_schema')
        if extras_schema is not None:
            schema['extras_schema'] = self.walk(extras_schema, f)
        replaced_fields: dict[str, core_schema.ModelField] = {}
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        for k, v in schema['fields'].items():
            replaced_field = v.copy()
            replaced_field['schema'] = self.walk(v['schema'], f)
            replaced_fields[k] = replaced_field
        schema['fields'] = replaced_fields
        return schema

    def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema:
        extras_schema = schema.get('extras_schema')
        if extras_schema is not None:
            schema['extras_schema'] = self.walk(extras_schema, f)
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        replaced_fields: dict[str, core_schema.TypedDictField] = {}
        for k, v in schema['fields'].items():
            replaced_field = v.copy()
            replaced_field['schema'] = self.walk(v['schema'], f)
            replaced_fields[k] = replaced_field
        schema['fields'] = replaced_fields
        return schema

    def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema:
        replaced_fields: list[core_schema.DataclassField] = []
        replaced_computed_fields: list[core_schema.ComputedField] = []
        for computed_field in schema.get('computed_fields', ()):
            replaced_field = computed_field.copy()
            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
            replaced_computed_fields.append(replaced_field)
        if replaced_computed_fields:
            schema['computed_fields'] = replaced_computed_fields
        for field in schema['fields']:
            replaced_field = field.copy()
            replaced_field['schema'] = self.walk(field['schema'], f)
            replaced_fields.append(replaced_field)
        schema['fields'] = replaced_fields
        return schema

    def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema:
        replaced_arguments_schema: list[core_schema.ArgumentsParameter] = []
        for param in schema['arguments_schema']:
            replaced_param = param.copy()
            replaced_param['schema'] = self.walk(param['schema'], f)
            replaced_arguments_schema.append(replaced_param)
        schema['arguments_schema'] = replaced_arguments_schema
        if 'var_args_schema' in schema:
            schema['var_args_schema'] = self.walk(schema['var_args_schema'], f)
        if 'var_kwargs_schema' in schema:
            schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f)
        return schema

    def handle_call_schema(self, schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema:
        schema['arguments_schema'] = self.walk(schema['arguments_schema'], f)
        if 'return_schema' in schema:
            schema['return_schema'] = self.walk(schema['return_schema'], f)
        return schema


_dispatch = _WalkCoreSchema().walk


def walk_core_schema(schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
    """Recursively traverse a CoreSchema.

    Args:
        schema (core_schema.CoreSchema): The CoreSchema to process, it will not be modified.
        f (Walk): A function to apply. This function takes two arguments:
            1. The current CoreSchema that is being processed
               (not the same one you passed into this function, one level down).
            2. The "next" `f` to call. This lets you for example use `f=functools.partial(some_method, some_context)`
               to pass data down the recursive calls without using globals or other mutable state.

    Returns:
        core_schema.CoreSchema: A processed CoreSchema.
    """
    return f(schema.copy(), _dispatch)


def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:  # noqa: C901
    definitions: dict[str, core_schema.CoreSchema] = {}
    ref_counts: dict[str, int] = defaultdict(int)
    involved_in_recursion: dict[str, bool] = {}
    current_recursion_ref_count: dict[str, int] = defaultdict(int)

    def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] == 'definitions':
            for definition in s['definitions']:
                ref = get_ref(definition)
                assert ref is not None
                if ref not in definitions:
                    definitions[ref] = definition
                recurse(definition, collect_refs)
            return recurse(s['schema'], collect_refs)
        else:
            ref = get_ref(s)
            if ref is not None:
                new = recurse(s, collect_refs)
                new_ref = get_ref(new)
                if new_ref:
                    definitions[new_ref] = new
                return core_schema.definition_reference_schema(schema_ref=ref)
            else:
                return recurse(s, collect_refs)

    schema = walk_core_schema(schema, collect_refs)

    def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] != 'definition-ref':
            return recurse(s, count_refs)
        ref = s['schema_ref']
        ref_counts[ref] += 1

        if ref_counts[ref] >= 2:
            # If this model is involved in a recursion this should be detected
            # on its second encounter, we can safely stop the walk here.
            if current_recursion_ref_count[ref] != 0:
                involved_in_recursion[ref] = True
            return s

        current_recursion_ref_count[ref] += 1
        recurse(definitions[ref], count_refs)
        current_recursion_ref_count[ref] -= 1
        return s

    schema = walk_core_schema(schema, count_refs)

    assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it'

    def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool:
        if ref_counts[ref] > 1:
            return False
        if involved_in_recursion.get(ref, False):
            return False
        if 'serialization' in s:
            return False
        if 'metadata' in s:
            metadata = s['metadata']
            for k in (
                'pydantic_js_functions',
                'pydantic_js_annotation_functions',
                'pydantic.internal.union_discriminator',
            ):
                if k in metadata:
                    # we need to keep this as a ref
                    return False
        return True

    def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
        if s['type'] == 'definition-ref':
            ref = s['schema_ref']
            # Check if the reference is only used once, not involved in recursion and does not have
            # any extra keys (like 'serialization')
            if can_be_inlined(s, ref):
                # Inline the reference by replacing the reference with the actual schema
                new = definitions.pop(ref)
                ref_counts[ref] -= 1  # because we just replaced it!
                # put all other keys that were on the def-ref schema into the inlined version
                # in particular this is needed for `serialization`
                if 'serialization' in s:
                    new['serialization'] = s['serialization']
                s = recurse(new, inline_refs)
                return s
            else:
                return recurse(s, inline_refs)
        else:
            return recurse(s, inline_refs)

    schema = walk_core_schema(schema, inline_refs)

    def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0]  # type: ignore

    if def_values:
        schema = core_schema.definitions_schema(schema=schema, definitions=def_values)
    return schema


def _strip_metadata(schema: CoreSchema) -> CoreSchema:
    def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema:
        s = s.copy()
        s.pop('metadata', None)
        if s['type'] == 'model-fields':
            s = s.copy()
            s['fields'] = {k: v.copy() for k, v in s['fields'].items()}
            for field_name, field_schema in s['fields'].items():
                field_schema.pop('metadata', None)
                s['fields'][field_name] = field_schema
            computed_fields = s.get('computed_fields', None)
            if computed_fields:
                s['computed_fields'] = [cf.copy() for cf in computed_fields]
                for cf in computed_fields:
                    cf.pop('metadata', None)
            else:
                s.pop('computed_fields', None)
        elif s['type'] == 'model':
            # remove some defaults
            if s.get('custom_init', True) is False:
                s.pop('custom_init')
            if s.get('root_model', True) is False:
                s.pop('root_model')
            if {'title'}.issuperset(s.get('config', {}).keys()):
                s.pop('config', None)

        return recurse(s, strip_metadata)

    return walk_core_schema(schema, strip_metadata)


def pretty_print_core_schema(
    schema: CoreSchema,
    include_metadata: bool = False,
) -> None:
    """Pretty print a CoreSchema using rich.
    This is intended for debugging purposes.

    Args:
        schema: The CoreSchema to print.
        include_metadata: Whether to include metadata in the output. Defaults to `False`.
    """
    from rich import print  # type: ignore  # install it manually in your dev env

    if not include_metadata:
        schema = _strip_metadata(schema)

    return print(schema)


def validate_core_schema(schema: CoreSchema) -> CoreSchema:
    if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ:
        return schema
    return _validate_core_schema(schema)
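A sketch of the `walk_core_schema` contract described in its docstring (again a private module; names assumed stable only within this snapshot). The `Walk` callable decides per node whether to stop or hand the node back to `recurse`, which dispatches into the per-type handlers above; here every `int` schema inside a larger schema gets a lower bound added, without mutating the input.

from pydantic_core import core_schema

from pydantic._internal._core_utils import Recurse, walk_core_schema


def add_int_lower_bound(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
    if s['type'] == 'int':
        s = s.copy()
        s['ge'] = 0  # constrain every int schema to >= 0
        return s  # leaf reached, nothing further to walk
    return recurse(s, add_int_lower_bound)


schema = core_schema.list_schema(core_schema.int_schema())
new_schema = walk_core_schema(schema, add_int_lower_bound)

print(new_schema['items_schema'])  # {'type': 'int', 'ge': 0}
print(schema['items_schema'])  # {'type': 'int'}, the original is untouched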
venv/lib/python3.10/site-packages/pydantic/_internal/_dataclasses.py
ADDED
@@ -0,0 +1,230 @@
"""Private logic for creating pydantic dataclasses."""
from __future__ import annotations as _annotations

import dataclasses
import typing
import warnings
from functools import partial, wraps
from typing import Any, Callable, ClassVar

from pydantic_core import (
    ArgsKwargs,
    SchemaSerializer,
    SchemaValidator,
    core_schema,
)
from typing_extensions import TypeGuard

from ..errors import PydanticUndefinedAnnotation
from ..fields import FieldInfo
from ..plugin._schema_validator import create_schema_validator
from ..warnings import PydanticDeprecatedSince20
from . import _config, _decorators, _typing_extra
from ._fields import collect_dataclass_fields
from ._generate_schema import GenerateSchema
from ._generics import get_standard_typevars_map
from ._mock_val_ser import set_dataclass_mocks
from ._schema_generation_shared import CallbackGetCoreSchemaHandler
from ._signature import generate_pydantic_signature

if typing.TYPE_CHECKING:
    from ..config import ConfigDict

    class StandardDataclass(typing.Protocol):
        __dataclass_fields__: ClassVar[dict[str, Any]]
        __dataclass_params__: ClassVar[Any]  # in reality `dataclasses._DataclassParams`
        __post_init__: ClassVar[Callable[..., None]]

        def __init__(self, *args: object, **kwargs: object) -> None:
            pass

    class PydanticDataclass(StandardDataclass, typing.Protocol):
        """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass.

        Attributes:
            __pydantic_config__: Pydantic-specific configuration settings for the dataclass.
            __pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields.
            __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
            __pydantic_decorators__: Metadata containing the decorators defined on the dataclass.
            __pydantic_fields__: Metadata about the fields defined on the dataclass.
            __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass.
            __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass.
        """

        __pydantic_config__: ClassVar[ConfigDict]
        __pydantic_complete__: ClassVar[bool]
        __pydantic_core_schema__: ClassVar[core_schema.CoreSchema]
        __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
        __pydantic_fields__: ClassVar[dict[str, FieldInfo]]
        __pydantic_serializer__: ClassVar[SchemaSerializer]
        __pydantic_validator__: ClassVar[SchemaValidator]

else:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    DeprecationWarning = PydanticDeprecatedSince20


def set_dataclass_fields(
    cls: type[StandardDataclass],
    types_namespace: dict[str, Any] | None = None,
    config_wrapper: _config.ConfigWrapper | None = None,
) -> None:
    """Collect and set `cls.__pydantic_fields__`.

    Args:
        cls: The class.
        types_namespace: The types namespace, defaults to `None`.
        config_wrapper: The config wrapper instance, defaults to `None`.
    """
    typevars_map = get_standard_typevars_map(cls)
    fields = collect_dataclass_fields(cls, types_namespace, typevars_map=typevars_map, config_wrapper=config_wrapper)

    cls.__pydantic_fields__ = fields  # type: ignore


def complete_dataclass(
    cls: type[Any],
    config_wrapper: _config.ConfigWrapper,
    *,
    raise_errors: bool = True,
    types_namespace: dict[str, Any] | None,
) -> bool:
    """Finish building a pydantic dataclass.

    This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`.

    This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`.

    Args:
        cls: The class.
        config_wrapper: The config wrapper instance.
        raise_errors: Whether to raise errors, defaults to `True`.
        types_namespace: The types namespace.

    Returns:
        `True` if building a pydantic dataclass is successfully completed, `False` otherwise.

    Raises:
        PydanticUndefinedAnnotation: If `raise_errors` is `True` and there are undefined annotations.
    """
    if hasattr(cls, '__post_init_post_parse__'):
        warnings.warn(
            'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning
        )

    if types_namespace is None:
        types_namespace = _typing_extra.get_cls_types_namespace(cls)

    set_dataclass_fields(cls, types_namespace, config_wrapper=config_wrapper)

    typevars_map = get_standard_typevars_map(cls)
    gen_schema = GenerateSchema(
        config_wrapper,
        types_namespace,
        typevars_map,
    )

    # This needs to be called before we change the __init__
    sig = generate_pydantic_signature(
        init=cls.__init__,
        fields=cls.__pydantic_fields__,  # type: ignore
        config_wrapper=config_wrapper,
        is_dataclass=True,
    )

    # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied.
    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
        __tracebackhide__ = True
        s = __dataclass_self__
        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)

    __init__.__qualname__ = f'{cls.__qualname__}.__init__'

    cls.__init__ = __init__  # type: ignore
    cls.__pydantic_config__ = config_wrapper.config_dict  # type: ignore
    cls.__signature__ = sig  # type: ignore
    get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None)
    try:
        if get_core_schema:
            schema = get_core_schema(
                cls,
                CallbackGetCoreSchemaHandler(
                    partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
                    gen_schema,
                    ref_mode='unpack',
                ),
            )
        else:
            schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
    except PydanticUndefinedAnnotation as e:
        if raise_errors:
            raise
        set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`')
        return False

    core_config = config_wrapper.core_config(cls)

    try:
        schema = gen_schema.clean_schema(schema)
    except gen_schema.CollectedInvalid:
        set_dataclass_mocks(cls, cls.__name__, 'all referenced types')
        return False

    # We are about to set all the remaining required properties expected for this cast;
    # __pydantic_decorators__ and __pydantic_fields__ should already be set
    cls = typing.cast('type[PydanticDataclass]', cls)
    # debug(schema)

    cls.__pydantic_core_schema__ = schema
    cls.__pydantic_validator__ = validator = create_schema_validator(
        schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings
    )
    cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)

    if config_wrapper.validate_assignment:

        @wraps(cls.__setattr__)
        def validated_setattr(instance: Any, field: str, value: str, /) -> None:
            validator.validate_assignment(instance, field, value)

        cls.__setattr__ = validated_setattr.__get__(None, cls)  # type: ignore

    return True


def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
    """Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass.

    We check that
    - `_cls` is a dataclass
    - `_cls` does not inherit from a processed pydantic dataclass (and thus have a `__pydantic_validator__`)
    - `_cls` does not have any annotations that are not dataclass fields
    e.g.
    ```py
    import dataclasses

    import pydantic.dataclasses

    @dataclasses.dataclass
    class A:
        x: int

    @pydantic.dataclasses.dataclass
    class B(A):
        y: int
    ```
    In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
    which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')

    Args:
        _cls: The class.

    Returns:
        `True` if the class is a stdlib dataclass, `False` otherwise.
    """
    return (
        dataclasses.is_dataclass(_cls)
        and not hasattr(_cls, '__pydantic_validator__')
        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
    )
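A quick demonstration of the distinction `is_builtin_dataclass` draws (a sketch against the private module above; pydantic v2 assumed installed):

import dataclasses

import pydantic.dataclasses

from pydantic._internal._dataclasses import is_builtin_dataclass


@dataclasses.dataclass
class Plain:
    x: int


@pydantic.dataclasses.dataclass
class Validated:
    x: int


print(is_builtin_dataclass(Plain))  # True: stdlib dataclass, no __pydantic_validator__
print(is_builtin_dataclass(Validated))  # False: the decorator added __pydantic_validator__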
venv/lib/python3.10/site-packages/pydantic/_internal/_decorators.py
ADDED
@@ -0,0 +1,791 @@
1 |
+
"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators."""
|
2 |
+
from __future__ import annotations as _annotations
|
3 |
+
|
4 |
+
from collections import deque
|
5 |
+
from dataclasses import dataclass, field
|
6 |
+
from functools import cached_property, partial, partialmethod
|
7 |
+
from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature
|
8 |
+
from itertools import islice
|
9 |
+
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union
|
10 |
+
|
11 |
+
from pydantic_core import PydanticUndefined, core_schema
|
12 |
+
from typing_extensions import Literal, TypeAlias, is_typeddict
|
13 |
+
|
14 |
+
from ..errors import PydanticUserError
|
15 |
+
from ._core_utils import get_type_ref
|
16 |
+
from ._internal_dataclass import slots_true
|
17 |
+
from ._typing_extra import get_function_type_hints
|
18 |
+
|
19 |
+
if TYPE_CHECKING:
|
20 |
+
from ..fields import ComputedFieldInfo
|
21 |
+
from ..functional_validators import FieldValidatorModes
|
22 |
+
|
23 |
+
|
24 |
+
@dataclass(**slots_true)
|
25 |
+
class ValidatorDecoratorInfo:
|
26 |
+
"""A container for data from `@validator` so that we can access it
|
27 |
+
while building the pydantic-core schema.
|
28 |
+
|
29 |
+
Attributes:
|
30 |
+
decorator_repr: A class variable representing the decorator string, '@validator'.
|
31 |
+
fields: A tuple of field names the validator should be called on.
|
32 |
+
mode: The proposed validator mode.
|
33 |
+
each_item: For complex objects (sets, lists etc.) whether to validate individual
|
34 |
+
elements rather than the whole object.
|
35 |
+
always: Whether this method and other validators should be called even if the value is missing.
|
36 |
+
check_fields: Whether to check that the fields actually exist on the model.
|
37 |
+
"""
|
38 |
+
|
39 |
+
decorator_repr: ClassVar[str] = '@validator'
|
40 |
+
|
41 |
+
fields: tuple[str, ...]
|
42 |
+
mode: Literal['before', 'after']
|
43 |
+
each_item: bool
|
44 |
+
always: bool
|
45 |
+
check_fields: bool | None
|
46 |
+
|
47 |
+
|
48 |
+
@dataclass(**slots_true)
|
49 |
+
class FieldValidatorDecoratorInfo:
|
50 |
+
"""A container for data from `@field_validator` so that we can access it
|
51 |
+
while building the pydantic-core schema.
|
52 |
+
|
53 |
+
Attributes:
|
54 |
+
decorator_repr: A class variable representing the decorator string, '@field_validator'.
|
55 |
+
fields: A tuple of field names the validator should be called on.
|
56 |
+
mode: The proposed validator mode.
|
57 |
+
check_fields: Whether to check that the fields actually exist on the model.
|
58 |
+
"""
|
59 |
+
|
60 |
+
decorator_repr: ClassVar[str] = '@field_validator'
|
61 |
+
|
62 |
+
fields: tuple[str, ...]
|
63 |
+
mode: FieldValidatorModes
|
64 |
+
check_fields: bool | None
|
65 |
+
|
66 |
+
|
67 |
+
@dataclass(**slots_true)
|
68 |
+
class RootValidatorDecoratorInfo:
|
69 |
+
"""A container for data from `@root_validator` so that we can access it
|
70 |
+
while building the pydantic-core schema.
|
71 |
+
|
72 |
+
Attributes:
|
73 |
+
decorator_repr: A class variable representing the decorator string, '@root_validator'.
|
74 |
+
mode: The proposed validator mode.
|
75 |
+
"""
|
76 |
+
|
77 |
+
decorator_repr: ClassVar[str] = '@root_validator'
|
78 |
+
mode: Literal['before', 'after']
|
79 |
+
|
80 |
+
|
81 |
+
@dataclass(**slots_true)
|
82 |
+
class FieldSerializerDecoratorInfo:
|
83 |
+
"""A container for data from `@field_serializer` so that we can access it
|
84 |
+
while building the pydantic-core schema.
|
85 |
+
|
86 |
+
Attributes:
|
87 |
+
decorator_repr: A class variable representing the decorator string, '@field_serializer'.
|
88 |
+
fields: A tuple of field names the serializer should be called on.
|
89 |
+
mode: The proposed serializer mode.
|
90 |
+
return_type: The type of the serializer's return value.
|
91 |
+
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
92 |
+
and `'json-unless-none'`.
|
93 |
+
check_fields: Whether to check that the fields actually exist on the model.
|
94 |
+
"""
|
95 |
+
|
96 |
+
decorator_repr: ClassVar[str] = '@field_serializer'
|
97 |
+
fields: tuple[str, ...]
|
98 |
+
mode: Literal['plain', 'wrap']
|
99 |
+
return_type: Any
|
100 |
+
when_used: core_schema.WhenUsed
|
101 |
+
check_fields: bool | None
|
102 |
+
|
103 |
+
|
104 |
+
@dataclass(**slots_true)
|
105 |
+
class ModelSerializerDecoratorInfo:
|
106 |
+
"""A container for data from `@model_serializer` so that we can access it
|
107 |
+
while building the pydantic-core schema.
|
108 |
+
|
109 |
+
Attributes:
|
110 |
+
decorator_repr: A class variable representing the decorator string, '@model_serializer'.
|
111 |
+
mode: The proposed serializer mode.
|
112 |
+
return_type: The type of the serializer's return value.
|
113 |
+
when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
|
114 |
+
and `'json-unless-none'`.
|
115 |
+
"""
|
116 |
+
|
117 |
+
decorator_repr: ClassVar[str] = '@model_serializer'
|
118 |
+
mode: Literal['plain', 'wrap']
|
119 |
+
return_type: Any
|
120 |
+
when_used: core_schema.WhenUsed
|
121 |
+
|
122 |
+
|
123 |
+
@dataclass(**slots_true)
|
124 |
+
class ModelValidatorDecoratorInfo:
|
125 |
+
"""A container for data from `@model_validator` so that we can access it
|
126 |
+
while building the pydantic-core schema.
|
127 |
+
|
128 |
+
Attributes:
|
129 |
+
decorator_repr: A class variable representing the decorator string, '@model_serializer'.
|
130 |
+
mode: The proposed serializer mode.
|
131 |
+
"""
|
132 |
+
|
133 |
+
decorator_repr: ClassVar[str] = '@model_validator'
|
134 |
+
mode: Literal['wrap', 'before', 'after']
|
135 |
+
|
136 |
+
|
137 |
+
DecoratorInfo: TypeAlias = """Union[
|
138 |
+
ValidatorDecoratorInfo,
|
139 |
+
FieldValidatorDecoratorInfo,
|
140 |
+
RootValidatorDecoratorInfo,
|
141 |
+
FieldSerializerDecoratorInfo,
|
142 |
+
ModelSerializerDecoratorInfo,
|
143 |
+
ModelValidatorDecoratorInfo,
|
144 |
+
ComputedFieldInfo,
|
145 |
+
]"""
|
146 |
+
|
147 |
+
ReturnType = TypeVar('ReturnType')
|
148 |
+
DecoratedType: TypeAlias = (
|
149 |
+
'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]'
|
150 |
+
)
|
151 |
+
|
152 |
+
|
153 |
+
@dataclass # can't use slots here since we set attributes on `__post_init__`
|
154 |
+
class PydanticDescriptorProxy(Generic[ReturnType]):
|
155 |
+
"""Wrap a classmethod, staticmethod, property or unbound function
|
156 |
+
and act as a descriptor that allows us to detect decorated items
|
157 |
+
from the class' attributes.
|
158 |
+
|
159 |
+
This class' __get__ returns the wrapped item's __get__ result,
|
160 |
+
which makes it transparent for classmethods and staticmethods.
|
161 |
+
|
162 |
+
Attributes:
|
163 |
+
wrapped: The decorator that has to be wrapped.
|
164 |
+
decorator_info: The decorator info.
|
165 |
+
shim: A wrapper function to wrap V1 style function.
|
166 |
+
"""
|
167 |
+
|
168 |
+
wrapped: DecoratedType[ReturnType]
|
169 |
+
decorator_info: DecoratorInfo
|
170 |
+
shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None
|
171 |
+
|
172 |
+
def __post_init__(self):
|
173 |
+
for attr in 'setter', 'deleter':
|
174 |
+
if hasattr(self.wrapped, attr):
|
175 |
+
f = partial(self._call_wrapped_attr, name=attr)
|
176 |
+
setattr(self, attr, f)
|
177 |
+
|
178 |
+
def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]:
|
179 |
+
self.wrapped = getattr(self.wrapped, name)(func)
|
180 |
+
return self
|
181 |
+
|
182 |
+
def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]:
|
183 |
+
try:
|
184 |
+
return self.wrapped.__get__(obj, obj_type)
|
185 |
+
except AttributeError:
|
186 |
+
# not a descriptor, e.g. a partial object
|
187 |
+
return self.wrapped # type: ignore[return-value]
|
188 |
+
|
189 |
+
def __set_name__(self, instance: Any, name: str) -> None:
|
190 |
+
if hasattr(self.wrapped, '__set_name__'):
|
191 |
+
self.wrapped.__set_name__(instance, name) # pyright: ignore[reportFunctionMemberAccess]
|
192 |
+
|
193 |
+
def __getattr__(self, __name: str) -> Any:
|
194 |
+
"""Forward checks for __isabstractmethod__ and such."""
|
195 |
+
return getattr(self.wrapped, __name)
|
196 |
+
|
197 |
+
|
198 |
+
DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo)
|
199 |
+
|
200 |
+
|
201 |
+
@dataclass(**slots_true)
|
202 |
+
class Decorator(Generic[DecoratorInfoType]):
|
203 |
+
"""A generic container class to join together the decorator metadata
|
204 |
+
(metadata from decorator itself, which we have when the
|
205 |
+
decorator is called but not when we are building the core-schema)
|
206 |
+
and the bound function (which we have after the class itself is created).
|
207 |
+
|
208 |
+
Attributes:
|
209 |
+
cls_ref: The class ref.
|
210 |
+
cls_var_name: The decorated function name.
|
211 |
+
func: The decorated function.
|
212 |
+
shim: A wrapper function to wrap V1 style function.
|
213 |
+
info: The decorator info.
|
214 |
+
"""
|
215 |
+
|
216 |
+
cls_ref: str
|
217 |
+
cls_var_name: str
|
218 |
+
func: Callable[..., Any]
|
219 |
+
shim: Callable[[Any], Any] | None
|
220 |
+
info: DecoratorInfoType
|
221 |
+
|
222 |
+
@staticmethod
|
223 |
+
def build(
|
224 |
+
cls_: Any,
|
225 |
+
*,
|
226 |
+
cls_var_name: str,
|
227 |
+
shim: Callable[[Any], Any] | None,
|
228 |
+
info: DecoratorInfoType,
|
229 |
+
) -> Decorator[DecoratorInfoType]:
|
230 |
+
"""Build a new decorator.
|
231 |
+
|
232 |
+
Args:
|
233 |
+
cls_: The class.
|
234 |
+
cls_var_name: The decorated function name.
|
235 |
+
shim: A wrapper function to wrap V1 style function.
|
236 |
+
info: The decorator info.
|
237 |
+
|
238 |
+
Returns:
|
239 |
+
The new decorator instance.
|
240 |
+
"""
|
241 |
+
func = get_attribute_from_bases(cls_, cls_var_name)
|
242 |
+
if shim is not None:
|
243 |
+
func = shim(func)
|
244 |
+
func = unwrap_wrapped_function(func, unwrap_partial=False)
|
245 |
+
if not callable(func):
|
246 |
+
# This branch will get hit for classmethod properties
|
247 |
+
attribute = get_attribute_from_base_dicts(cls_, cls_var_name) # prevents the binding call to `__get__`
|
248 |
+
if isinstance(attribute, PydanticDescriptorProxy):
|
249 |
+
func = unwrap_wrapped_function(attribute.wrapped)
|
250 |
+
return Decorator(
|
251 |
+
cls_ref=get_type_ref(cls_),
|
252 |
+
cls_var_name=cls_var_name,
|
253 |
+
func=func,
|
254 |
+
shim=shim,
|
255 |
+
info=info,
|
256 |
+
)
|
257 |
+
|
258 |
+
def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]:
|
259 |
+
"""Bind the decorator to a class.
|
260 |
+
|
261 |
+
Args:
|
262 |
+
cls: the class.
|
263 |
+
|
264 |
+
Returns:
|
265 |
+
The new decorator instance.
|
266 |
+
"""
|
267 |
+
return self.build(
|
268 |
+
cls,
|
269 |
+
cls_var_name=self.cls_var_name,
|
270 |
+
shim=self.shim,
|
271 |
+
info=self.info,
|
272 |
+
)
|
273 |
+
|
274 |
+
|
275 |
+
def get_bases(tp: type[Any]) -> tuple[type[Any], ...]:
|
276 |
+
"""Get the base classes of a class or typeddict.
|
277 |
+
|
278 |
+
Args:
|
279 |
+
tp: The type or class to get the bases.
|
280 |
+
|
281 |
+
Returns:
|
282 |
+
The base classes.
|
283 |
+
"""
|
284 |
+
if is_typeddict(tp):
|
285 |
+
return tp.__orig_bases__ # type: ignore
|
286 |
+
try:
|
287 |
+
return tp.__bases__
|
288 |
+
except AttributeError:
|
289 |
+
return ()
|
290 |
+
|
291 |
+
|
292 |
+
def mro(tp: type[Any]) -> tuple[type[Any], ...]:
|
293 |
+
"""Calculate the Method Resolution Order of bases using the C3 algorithm.
|
294 |
+
|
295 |
+
See https://www.python.org/download/releases/2.3/mro/
|
296 |
+
"""
|
297 |
+
# try to use the existing mro, for performance mainly
|
298 |
+
# but also because it helps verify the implementation below
|
299 |
+
if not is_typeddict(tp):
|
300 |
+
try:
|
301 |
+
return tp.__mro__
|
302 |
+
except AttributeError:
|
303 |
+
# GenericAlias and some other cases
|
304 |
+
pass
|
305 |
+
|
306 |
+
bases = get_bases(tp)
|
307 |
+
return (tp,) + mro_for_bases(bases)
|
308 |
+
|
309 |
+
|
310 |
+
def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]:
    """Calculate the MRO implied by a tuple of bases, using the C3 merge."""

    def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]:
        while True:
            non_empty = [seq for seq in seqs if seq]
            if not non_empty:
                # Nothing left to process, we're done.
                return
            candidate: type[Any] | None = None
            for seq in non_empty:  # Find merge candidates among seq heads.
                candidate = seq[0]
                not_head = [s for s in non_empty if candidate in islice(s, 1, None)]
                if not_head:
                    # Reject the candidate.
                    candidate = None
                else:
                    break
            if not candidate:
                raise TypeError('Inconsistent hierarchy, no C3 MRO is possible')
            yield candidate
            for seq in non_empty:
                # Remove candidate.
                if seq[0] == candidate:
                    seq.popleft()

    seqs = [deque(mro(base)) for base in bases] + [deque(bases)]
    return tuple(merge_seqs(seqs))


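# A minimal sketch of the C3 merge on the classic diamond (hypothetical
# classes A/B/C):
#
#     class A: ...
#     class B(A): ...
#     class C(A): ...
#
#     mro_for_bases((B, C))          # (B, C, A, object)
#     type('D', (B, C), {}).__mro__  # (D, B, C, A, object), same linearization

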
_sentinel = object()


def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any:
    """Get the attribute from the next class in the MRO that has it,
    aiming to simulate calling the method on the actual class.

    The reason for iterating over the mro instead of just getting
    the attribute (which would do that for us) is to support TypedDict,
    which lacks a real __mro__, but can have a virtual one constructed
    from its bases (as done here).

    Args:
        tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes.
        name: The name of the attribute to retrieve.

    Returns:
        Any: The attribute value, if found.

    Raises:
        AttributeError: If the attribute is not found in any class in the MRO.
    """
    if isinstance(tp, tuple):
        for base in mro_for_bases(tp):
            attribute = base.__dict__.get(name, _sentinel)
            if attribute is not _sentinel:
                attribute_get = getattr(attribute, '__get__', None)
                if attribute_get is not None:
                    return attribute_get(None, tp)
                return attribute
        raise AttributeError(f'{name} not found in {tp}')
    else:
        try:
            return getattr(tp, name)
        except AttributeError:
            return get_attribute_from_bases(mro(tp), name)


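# A brief sketch (hypothetical classes): looking up a classmethod inherited
# from a base returns it bound as if accessed on the subclass:
#
#     class Base:
#         @classmethod
#         def make(cls):
#             return cls()
#
#     class Sub(Base): ...
#
#     get_attribute_from_bases(Sub, 'make')()  # a Sub instance, not a Base

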
def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any:
    """Get an attribute out of the `__dict__` following the MRO.
    This prevents the call to `__get__` on the descriptor, and allows
    us to get the original function for classmethod properties.

    Args:
        tp: The type or class to search for the attribute.
        name: The name of the attribute to retrieve.

    Returns:
        Any: The attribute value, if found.

    Raises:
        KeyError: If the attribute is not found in any class's `__dict__` in the MRO.
    """
    for base in reversed(mro(tp)):
        if name in base.__dict__:
            return base.__dict__[name]
    return tp.__dict__[name]  # raise the error


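# Why the raw `__dict__` route matters -- a minimal sketch (hypothetical
# class): plain attribute access triggers the descriptor protocol, whereas
# this helper returns the descriptor object itself:
#
#     class M:
#         @classmethod
#         def f(cls): ...
#
#     getattr(M, 'f')                        # a bound method (after __get__)
#     get_attribute_from_base_dicts(M, 'f')  # the raw classmethod object

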
@dataclass(**slots_true)
class DecoratorInfos:
    """Mapping of name in the class namespace to decorator info.

    Note that the name in the class namespace is the function or attribute name,
    not the field name!
    """

    validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict)
    field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict)
    root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict)
    field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict)
    model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict)
    model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict)
    computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict)

    @staticmethod
    def build(model_dc: type[Any]) -> DecoratorInfos:  # noqa: C901 (ignore complexity)
        """We want to collect all DecFunc instances that exist as
        attributes in the namespace of the class (a BaseModel or dataclass)
        that called us.

        But we want to collect these in the order of the bases, so instead of
        getting them all from the leaf class (the class that called us),
        we traverse the bases from root (the oldest ancestor class) to leaf
        and collect all of the instances as we go, taking care to replace
        any duplicate ones with the last one we see, to mimic how function
        overriding works with inheritance.

        If we do replace any functions, we put the replacement into the
        position the replaced function was in; that is, we maintain the order.
        """
        # reminder: dicts are ordered and replacement does not alter the order
        res = DecoratorInfos()
        for base in reversed(mro(model_dc)[1:]):
            existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__')
            if existing is None:
                existing = DecoratorInfos.build(base)
            res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()})
            res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()})
            res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()})
            res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()})
            res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()})
            res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()})
            res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()})

        to_replace: list[tuple[str, Any]] = []

        for var_name, var_value in vars(model_dc).items():
            if isinstance(var_value, PydanticDescriptorProxy):
                info = var_value.decorator_info
                if isinstance(info, ValidatorDecoratorInfo):
                    res.validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, FieldValidatorDecoratorInfo):
                    res.field_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, RootValidatorDecoratorInfo):
                    res.root_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, FieldSerializerDecoratorInfo):
                    # check whether a serializer function is already registered for fields
                    for field_serializer_decorator in res.field_serializers.values():
                        # check that each field has at most one serializer function.
                        # serializer functions for the same field in subclasses are allowed,
                        # and are treated as overrides
                        if field_serializer_decorator.cls_var_name == var_name:
                            continue
                        for f in info.fields:
                            if f in field_serializer_decorator.info.fields:
                                raise PydanticUserError(
                                    'Multiple field serializer functions were defined '
                                    f'for field {f!r}, this is not allowed.',
                                    code='multiple-field-serializers',
                                )
                    res.field_serializers[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, ModelValidatorDecoratorInfo):
                    res.model_validators[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                elif isinstance(info, ModelSerializerDecoratorInfo):
                    res.model_serializers[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
                    )
                else:
                    from ..fields import ComputedFieldInfo

                    assert isinstance(info, ComputedFieldInfo)  # the only remaining decorator info type
                    res.computed_fields[var_name] = Decorator.build(
                        model_dc, cls_var_name=var_name, shim=None, info=info
                    )
                to_replace.append((var_name, var_value.wrapped))
        if to_replace:
            # If we can save `__pydantic_decorators__` on the class we'll be able to check for it above
            # so then we don't need to re-process the type, which means we can discard our descriptor wrappers
            # and replace them with the thing they are wrapping (see the other setattr call below)
            # which allows validator class methods to also function as regular class methods
            setattr(model_dc, '__pydantic_decorators__', res)
            for name, value in to_replace:
                setattr(model_dc, name, value)
        return res


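# The override bookkeeping above relies on dict insertion-order semantics --
# a minimal sketch: updating an existing key replaces the value but keeps the
# key's original position, which is how overriding validators stay in place:
#
#     d = {'val_a': 1, 'val_b': 2}
#     d.update({'val_a': 10})
#     list(d)  # ['val_a', 'val_b'] -- 'val_a' keeps its slot

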
def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool:
    """Look at a field or model validator function and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        validator: The validator function to inspect.
        mode: The proposed validator mode.

    Returns:
        Whether the validator takes an info argument.
    """
    try:
        sig = signature(validator)
    except ValueError:
        # builtins and some C extensions don't have signatures
        # assume that they don't take an info argument and only take a single argument
        # e.g. `str.strip` or `datetime.datetime`
        return False
    n_positional = count_positional_params(sig)
    if mode == 'wrap':
        if n_positional == 3:
            return True
        elif n_positional == 2:
            return False
    else:
        assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'"
        if n_positional == 2:
            return True
        elif n_positional == 1:
            return False

    raise PydanticUserError(
        f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}',
        code='validator-signature',
    )


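# The signature shapes this distinguishes, as a sketch (hypothetical
# functions; only positional parameters are counted):
#
#     def v(value): ...                  # before/after/plain -> info_arg=False
#     def v(value, info): ...            # before/after/plain -> info_arg=True
#     def w(value, handler): ...         # wrap -> info_arg=False
#     def w(value, handler, info): ...   # wrap -> info_arg=True
#
#     inspect_validator(lambda value, info: value, mode='after')  # True

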
def inspect_field_serializer(
    serializer: Callable[..., Any], mode: Literal['plain', 'wrap'], computed_field: bool = False
) -> tuple[bool, bool]:
    """Look at a field serializer function and determine if it is a field serializer,
    and whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to inspect.
        mode: The serializer mode, either 'plain' or 'wrap'.
        computed_field: Whether the serializer is applied on a computed field,
            in which case it must not take an info argument.

    Returns:
        Tuple of (is_field_serializer, info_arg).
    """
    sig = signature(serializer)

    first = next(iter(sig.parameters.values()), None)
    is_field_serializer = first is not None and first.name == 'self'

    n_positional = count_positional_params(sig)
    if is_field_serializer:
        # -1 to correct for self parameter
        info_arg = _serializer_info_arg(mode, n_positional - 1)
    else:
        info_arg = _serializer_info_arg(mode, n_positional)

    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='field-serializer-signature',
        )
    if info_arg and computed_field:
        raise PydanticUserError(
            'field_serializer on computed_field does not use info signature', code='field-serializer-signature'
        )

    return is_field_serializer, info_arg


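# A sketch of the shapes accepted for `mode='plain'` (hypothetical functions;
# a leading `self` parameter marks an instance-method serializer):
#
#     inspect_field_serializer(lambda value: value, mode='plain')
#     # -> (False, False): free function, no info argument
#
#     def ser(self, value, info): ...
#     inspect_field_serializer(ser, mode='plain')
#     # -> (True, True): instance method taking an info argument

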
def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
    """Look at a serializer function used via `Annotated` and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to check.
        mode: The serializer mode, either 'plain' or 'wrap'.

    Returns:
        `info_arg` - whether the function expects an info argument.
    """
    sig = signature(serializer)
    info_arg = _serializer_info_arg(mode, count_positional_params(sig))
    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='field-serializer-signature',
        )
    else:
        return info_arg


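# This is the path used for serializers attached via `Annotated`, e.g. with
# pydantic's `PlainSerializer` -- a minimal sketch:
#
#     from typing import Annotated
#     from pydantic import PlainSerializer
#
#     Stringified = Annotated[int, PlainSerializer(lambda v: str(v))]
#     inspect_annotated_serializer(lambda v: str(v), mode='plain')  # False

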
def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
    """Look at a model serializer function and determine whether it takes an info argument.

    An error is raised if the function has an invalid signature.

    Args:
        serializer: The serializer function to check.
        mode: The serializer mode, either 'plain' or 'wrap'.

    Returns:
        `info_arg` - whether the function expects an info argument.
    """
    if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer):
        raise PydanticUserError(
            '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method'
        )

    sig = signature(serializer)
    info_arg = _serializer_info_arg(mode, count_positional_params(sig))
    if info_arg is None:
        raise PydanticUserError(
            f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}',
            code='model-serializer-signature',
        )
    else:
        return info_arg


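# A sketch of accepted `@model_serializer` shapes (hypothetical models; note
# that `self` is counted here, unlike in the field-serializer case):
#
#     from pydantic import BaseModel, model_serializer
#
#     class A(BaseModel):
#         @model_serializer
#         def ser(self): ...                 # mode='plain', info_arg=False
#
#     class B(BaseModel):
#         @model_serializer(mode='wrap')
#         def ser(self, handler, info): ...  # mode='wrap', info_arg=True

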
def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None:
    if mode == 'plain':
        if n_positional == 1:
            # (input_value: Any, /) -> Any
            return False
        elif n_positional == 2:
            # (model: Any, input_value: Any, /) -> Any
            return True
    else:
        assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
        if n_positional == 2:
            # (input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any
            return False
        elif n_positional == 3:
            # (input_value: Any, serializer: SerializerFunctionWrapHandler, info: SerializationInfo, /) -> Any
            return True

    return None


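# The full decision table, spelled out (None signals an invalid signature):
#
#     assert _serializer_info_arg('plain', 1) is False
#     assert _serializer_info_arg('plain', 2) is True
#     assert _serializer_info_arg('wrap', 2) is False
#     assert _serializer_info_arg('wrap', 3) is True
#     assert _serializer_info_arg('plain', 3) is None

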
AnyDecoratorCallable: TypeAlias = (
    'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]'
)


def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool:
    """Whether the function is an instance method.

    It will consider a function to be an instance method if the first
    parameter of the function is `self`.

    Args:
        function: The function to check.

    Returns:
        `True` if the function is an instance method, `False` otherwise.
    """
    sig = signature(unwrap_wrapped_function(function))
    first = next(iter(sig.parameters.values()), None)
    if first and first.name == 'self':
        return True
    return False


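# Detection is purely name-based -- a minimal sketch:
#
#     is_instance_method_from_sig(lambda self, x: x)  # True
#     is_instance_method_from_sig(lambda cls, x: x)   # False
#     is_instance_method_from_sig(lambda x: x)        # False

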
def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any:
    """Apply the `@classmethod` decorator on the function.

    Args:
        function: The function to apply the decorator on.

    Returns:
        The function with the `@classmethod` decorator applied when its first
        parameter is `cls` and it is not already a classmethod; otherwise the
        function unchanged.
    """
    if not isinstance(
        unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod
    ) and _is_classmethod_from_sig(function):
        return classmethod(function)  # type: ignore[arg-type]
    return function


def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool:
    sig = signature(unwrap_wrapped_function(function))
    first = next(iter(sig.parameters.values()), None)
    if first and first.name == 'cls':
        return True
    return False


def unwrap_wrapped_function(
    func: Any,
    *,
    unwrap_partial: bool = True,
    unwrap_class_static_method: bool = True,
) -> Any:
    """Recursively unwraps a wrapped function until the underlying function is reached.
    This handles property, functools.partial, functools.partialmethod, staticmethod and classmethod.

    Args:
        func: The function to unwrap.
        unwrap_partial: If True (default), unwrap partial and partialmethod decorators; otherwise don't.
        unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod
            decorators. If False, only unwrap partial and partialmethod decorators.

    Returns:
        The underlying function of the wrapped function.
    """
    all: set[Any] = {property, cached_property}

    if unwrap_partial:
        all.update({partial, partialmethod})

    if unwrap_class_static_method:
        all.update({staticmethod, classmethod})

    while isinstance(func, tuple(all)):
        if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)):
            func = func.__func__
        elif isinstance(func, (partial, partialmethod)):
            func = func.func
        elif isinstance(func, property):
            func = func.fget  # arbitrary choice, convenient for computed fields
        else:
            # Make coverage happy as it can only get here in the last possible case
            assert isinstance(func, cached_property)
            func = func.func  # type: ignore

    return func


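# A minimal sketch of the recursive unwrapping (hypothetical function `f`):
#
#     from functools import partial
#
#     def f(a, b): ...
#
#     unwrap_wrapped_function(classmethod(partial(f, 1))) is f
#     # True: the classmethod layer is peeled first, then the partial

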
def get_function_return_type(
    func: Any, explicit_return_type: Any, types_namespace: dict[str, Any] | None = None
) -> Any:
    """Get the function return type.

    It gets the return type from the type annotation if `explicit_return_type` is `PydanticUndefined`.
    Otherwise, it returns `explicit_return_type`.

    Args:
        func: The function to get the return type of.
        explicit_return_type: The explicit return type.
        types_namespace: The types namespace, defaults to `None`.

    Returns:
        The function return type.
    """
    if explicit_return_type is PydanticUndefined:
        # try to get it from the type annotation
        hints = get_function_type_hints(
            unwrap_wrapped_function(func), include_keys={'return'}, types_namespace=types_namespace
        )
        return hints.get('return', PydanticUndefined)
    else:
        return explicit_return_type


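# A minimal sketch (hypothetical function `f`): the annotation is only
# consulted when no explicit return type was provided:
#
#     def f(x: int) -> str: ...
#
#     get_function_return_type(f, PydanticUndefined)  # str, from the annotation
#     get_function_return_type(f, bytes)              # bytes, explicit wins

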
def count_positional_params(sig: Signature) -> int:
    return sum(1 for param in sig.parameters.values() if can_be_positional(param))


def can_be_positional(param: Parameter) -> bool:
    return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)


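# Keyword-only and variadic parameters are excluded from the count -- a
# sketch (hypothetical function):
#
#     from inspect import signature
#
#     def f(a, b, /, c, *args, d=None, **kwargs): ...
#
#     count_positional_params(signature(f))  # 3, counting a, b and c

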
def ensure_property(f: Any) -> Any:
    """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor.

    Args:
        f: The function to check.

    Returns:
        The function unchanged if it is already a method or data descriptor,
        otherwise a new `property` wrapping it.
    """
    if ismethoddescriptor(f) or isdatadescriptor(f):
        return f
    else:
        return property(f)
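

# A minimal sketch (hypothetical function `g`): existing descriptors pass
# through untouched, plain functions get wrapped:
#
#     def g(self): ...
#
#     p = property(g)
#     ensure_property(p) is p  # True: already a data descriptor
#     ensure_property(g)       # a new property wrapping g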