Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/aiohttp/abc.py +209 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/tracing.py +471 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/web_middlewares.py +116 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/web_response.py +819 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__init__.py +115 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__main__.py +6 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/big5prober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/chardistribution.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/charsetgroupprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/charsetprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/codingstatemachine.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/codingstatemachinedict.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/cp949prober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/enums.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/escprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/escsm.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/eucjpprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/euctwfreq.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/gb2312prober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/hebrewprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/jisfreq.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/johabfreq.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/johabprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/jpcntx.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langhebrewmodel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langhungarianmodel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langrussianmodel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langturkishmodel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/latin1prober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/macromanprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcharsetprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcssm.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sbcharsetprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sjisprober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/universaldetector.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/utf1632prober.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/chardistribution.py +261 -0
- llmeval-env/lib/python3.10/site-packages/chardet/charsetprober.py +147 -0
- llmeval-env/lib/python3.10/site-packages/chardet/cli/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/cli/__pycache__/chardetect.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/chardet/cli/chardetect.py +112 -0
- llmeval-env/lib/python3.10/site-packages/chardet/eucjpprober.py +102 -0
- llmeval-env/lib/python3.10/site-packages/chardet/euckrfreq.py +196 -0
- llmeval-env/lib/python3.10/site-packages/chardet/gb2312prober.py +47 -0
- llmeval-env/lib/python3.10/site-packages/chardet/jpcntx.py +238 -0
llmeval-env/lib/python3.10/site-packages/aiohttp/abc.py
ADDED
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import logging
|
3 |
+
from abc import ABC, abstractmethod
|
4 |
+
from collections.abc import Sized
|
5 |
+
from http.cookies import BaseCookie, Morsel
|
6 |
+
from typing import (
|
7 |
+
TYPE_CHECKING,
|
8 |
+
Any,
|
9 |
+
Awaitable,
|
10 |
+
Callable,
|
11 |
+
Dict,
|
12 |
+
Generator,
|
13 |
+
Iterable,
|
14 |
+
List,
|
15 |
+
Optional,
|
16 |
+
Tuple,
|
17 |
+
)
|
18 |
+
|
19 |
+
from multidict import CIMultiDict
|
20 |
+
from yarl import URL
|
21 |
+
|
22 |
+
from .helpers import get_running_loop
|
23 |
+
from .typedefs import LooseCookies
|
24 |
+
|
25 |
+
if TYPE_CHECKING:
|
26 |
+
from .web_app import Application
|
27 |
+
from .web_exceptions import HTTPException
|
28 |
+
from .web_request import BaseRequest, Request
|
29 |
+
from .web_response import StreamResponse
|
30 |
+
else:
|
31 |
+
BaseRequest = Request = Application = StreamResponse = None
|
32 |
+
HTTPException = None
|
33 |
+
|
34 |
+
|
35 |
+
class AbstractRouter(ABC):
|
36 |
+
def __init__(self) -> None:
|
37 |
+
self._frozen = False
|
38 |
+
|
39 |
+
def post_init(self, app: Application) -> None:
|
40 |
+
"""Post init stage.
|
41 |
+
|
42 |
+
Not an abstract method for sake of backward compatibility,
|
43 |
+
but if the router wants to be aware of the application
|
44 |
+
it can override this.
|
45 |
+
"""
|
46 |
+
|
47 |
+
@property
|
48 |
+
def frozen(self) -> bool:
|
49 |
+
return self._frozen
|
50 |
+
|
51 |
+
def freeze(self) -> None:
|
52 |
+
"""Freeze router."""
|
53 |
+
self._frozen = True
|
54 |
+
|
55 |
+
@abstractmethod
|
56 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
57 |
+
"""Return MATCH_INFO for given request"""
|
58 |
+
|
59 |
+
|
60 |
+
class AbstractMatchInfo(ABC):
|
61 |
+
@property # pragma: no branch
|
62 |
+
@abstractmethod
|
63 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
64 |
+
"""Execute matched request handler"""
|
65 |
+
|
66 |
+
@property
|
67 |
+
@abstractmethod
|
68 |
+
def expect_handler(
|
69 |
+
self,
|
70 |
+
) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
|
71 |
+
"""Expect handler for 100-continue processing"""
|
72 |
+
|
73 |
+
@property # pragma: no branch
|
74 |
+
@abstractmethod
|
75 |
+
def http_exception(self) -> Optional[HTTPException]:
|
76 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
77 |
+
|
78 |
+
@abstractmethod # pragma: no branch
|
79 |
+
def get_info(self) -> Dict[str, Any]:
|
80 |
+
"""Return a dict with additional info useful for introspection"""
|
81 |
+
|
82 |
+
@property # pragma: no branch
|
83 |
+
@abstractmethod
|
84 |
+
def apps(self) -> Tuple[Application, ...]:
|
85 |
+
"""Stack of nested applications.
|
86 |
+
|
87 |
+
Top level application is left-most element.
|
88 |
+
|
89 |
+
"""
|
90 |
+
|
91 |
+
@abstractmethod
|
92 |
+
def add_app(self, app: Application) -> None:
|
93 |
+
"""Add application to the nested apps stack."""
|
94 |
+
|
95 |
+
@abstractmethod
|
96 |
+
def freeze(self) -> None:
|
97 |
+
"""Freeze the match info.
|
98 |
+
|
99 |
+
The method is called after route resolution.
|
100 |
+
|
101 |
+
After the call .add_app() is forbidden.
|
102 |
+
|
103 |
+
"""
|
104 |
+
|
105 |
+
|
106 |
+
class AbstractView(ABC):
|
107 |
+
"""Abstract class based view."""
|
108 |
+
|
109 |
+
def __init__(self, request: Request) -> None:
|
110 |
+
self._request = request
|
111 |
+
|
112 |
+
@property
|
113 |
+
def request(self) -> Request:
|
114 |
+
"""Request instance."""
|
115 |
+
return self._request
|
116 |
+
|
117 |
+
@abstractmethod
|
118 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
119 |
+
"""Execute the view handler."""
|
120 |
+
|
121 |
+
|
122 |
+
class AbstractResolver(ABC):
|
123 |
+
"""Abstract DNS resolver."""
|
124 |
+
|
125 |
+
@abstractmethod
|
126 |
+
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
|
127 |
+
"""Return IP address for given hostname"""
|
128 |
+
|
129 |
+
@abstractmethod
|
130 |
+
async def close(self) -> None:
|
131 |
+
"""Release resolver"""
|
132 |
+
|
133 |
+
|
134 |
+
if TYPE_CHECKING:
|
135 |
+
IterableBase = Iterable[Morsel[str]]
|
136 |
+
else:
|
137 |
+
IterableBase = Iterable
|
138 |
+
|
139 |
+
|
140 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
141 |
+
|
142 |
+
|
143 |
+
class AbstractCookieJar(Sized, IterableBase):
|
144 |
+
"""Abstract Cookie Jar."""
|
145 |
+
|
146 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
147 |
+
self._loop = get_running_loop(loop)
|
148 |
+
|
149 |
+
@abstractmethod
|
150 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
151 |
+
"""Clear all cookies if no predicate is passed."""
|
152 |
+
|
153 |
+
@abstractmethod
|
154 |
+
def clear_domain(self, domain: str) -> None:
|
155 |
+
"""Clear all cookies for domain and all subdomains."""
|
156 |
+
|
157 |
+
@abstractmethod
|
158 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
159 |
+
"""Update cookies."""
|
160 |
+
|
161 |
+
@abstractmethod
|
162 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
163 |
+
"""Return the jar's cookies filtered by their attributes."""
|
164 |
+
|
165 |
+
|
166 |
+
class AbstractStreamWriter(ABC):
|
167 |
+
"""Abstract stream writer."""
|
168 |
+
|
169 |
+
buffer_size = 0
|
170 |
+
output_size = 0
|
171 |
+
length: Optional[int] = 0
|
172 |
+
|
173 |
+
@abstractmethod
|
174 |
+
async def write(self, chunk: bytes) -> None:
|
175 |
+
"""Write chunk into stream."""
|
176 |
+
|
177 |
+
@abstractmethod
|
178 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
179 |
+
"""Write last chunk."""
|
180 |
+
|
181 |
+
@abstractmethod
|
182 |
+
async def drain(self) -> None:
|
183 |
+
"""Flush the write buffer."""
|
184 |
+
|
185 |
+
@abstractmethod
|
186 |
+
def enable_compression(self, encoding: str = "deflate") -> None:
|
187 |
+
"""Enable HTTP body compression"""
|
188 |
+
|
189 |
+
@abstractmethod
|
190 |
+
def enable_chunking(self) -> None:
|
191 |
+
"""Enable HTTP chunked mode"""
|
192 |
+
|
193 |
+
@abstractmethod
|
194 |
+
async def write_headers(
|
195 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
196 |
+
) -> None:
|
197 |
+
"""Write HTTP headers"""
|
198 |
+
|
199 |
+
|
200 |
+
class AbstractAccessLogger(ABC):
|
201 |
+
"""Abstract writer to access log."""
|
202 |
+
|
203 |
+
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
204 |
+
self.logger = logger
|
205 |
+
self.log_format = log_format
|
206 |
+
|
207 |
+
@abstractmethod
|
208 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
209 |
+
"""Emit log to logger."""
|
llmeval-env/lib/python3.10/site-packages/aiohttp/formdata.py
ADDED
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import io
|
2 |
+
import warnings
|
3 |
+
from typing import Any, Iterable, List, Optional
|
4 |
+
from urllib.parse import urlencode
|
5 |
+
|
6 |
+
from multidict import MultiDict, MultiDictProxy
|
7 |
+
|
8 |
+
from . import hdrs, multipart, payload
|
9 |
+
from .helpers import guess_filename
|
10 |
+
from .payload import Payload
|
11 |
+
|
12 |
+
__all__ = ("FormData",)
|
13 |
+
|
14 |
+
|
15 |
+
class FormData:
|
16 |
+
"""Helper class for form body generation.
|
17 |
+
|
18 |
+
Supports multipart/form-data and application/x-www-form-urlencoded.
|
19 |
+
"""
|
20 |
+
|
21 |
+
def __init__(
|
22 |
+
self,
|
23 |
+
fields: Iterable[Any] = (),
|
24 |
+
quote_fields: bool = True,
|
25 |
+
charset: Optional[str] = None,
|
26 |
+
) -> None:
|
27 |
+
self._writer = multipart.MultipartWriter("form-data")
|
28 |
+
self._fields: List[Any] = []
|
29 |
+
self._is_multipart = False
|
30 |
+
self._is_processed = False
|
31 |
+
self._quote_fields = quote_fields
|
32 |
+
self._charset = charset
|
33 |
+
|
34 |
+
if isinstance(fields, dict):
|
35 |
+
fields = list(fields.items())
|
36 |
+
elif not isinstance(fields, (list, tuple)):
|
37 |
+
fields = (fields,)
|
38 |
+
self.add_fields(*fields)
|
39 |
+
|
40 |
+
@property
|
41 |
+
def is_multipart(self) -> bool:
|
42 |
+
return self._is_multipart
|
43 |
+
|
44 |
+
def add_field(
|
45 |
+
self,
|
46 |
+
name: str,
|
47 |
+
value: Any,
|
48 |
+
*,
|
49 |
+
content_type: Optional[str] = None,
|
50 |
+
filename: Optional[str] = None,
|
51 |
+
content_transfer_encoding: Optional[str] = None,
|
52 |
+
) -> None:
|
53 |
+
|
54 |
+
if isinstance(value, io.IOBase):
|
55 |
+
self._is_multipart = True
|
56 |
+
elif isinstance(value, (bytes, bytearray, memoryview)):
|
57 |
+
msg = (
|
58 |
+
"In v4, passing bytes will no longer create a file field. "
|
59 |
+
"Please explicitly use the filename parameter or pass a BytesIO object."
|
60 |
+
)
|
61 |
+
if filename is None and content_transfer_encoding is None:
|
62 |
+
warnings.warn(msg, DeprecationWarning)
|
63 |
+
filename = name
|
64 |
+
|
65 |
+
type_options: MultiDict[str] = MultiDict({"name": name})
|
66 |
+
if filename is not None and not isinstance(filename, str):
|
67 |
+
raise TypeError(
|
68 |
+
"filename must be an instance of str. " "Got: %s" % filename
|
69 |
+
)
|
70 |
+
if filename is None and isinstance(value, io.IOBase):
|
71 |
+
filename = guess_filename(value, name)
|
72 |
+
if filename is not None:
|
73 |
+
type_options["filename"] = filename
|
74 |
+
self._is_multipart = True
|
75 |
+
|
76 |
+
headers = {}
|
77 |
+
if content_type is not None:
|
78 |
+
if not isinstance(content_type, str):
|
79 |
+
raise TypeError(
|
80 |
+
"content_type must be an instance of str. " "Got: %s" % content_type
|
81 |
+
)
|
82 |
+
headers[hdrs.CONTENT_TYPE] = content_type
|
83 |
+
self._is_multipart = True
|
84 |
+
if content_transfer_encoding is not None:
|
85 |
+
if not isinstance(content_transfer_encoding, str):
|
86 |
+
raise TypeError(
|
87 |
+
"content_transfer_encoding must be an instance"
|
88 |
+
" of str. Got: %s" % content_transfer_encoding
|
89 |
+
)
|
90 |
+
msg = (
|
91 |
+
"content_transfer_encoding is deprecated. "
|
92 |
+
"To maintain compatibility with v4 please pass a BytesPayload."
|
93 |
+
)
|
94 |
+
warnings.warn(msg, DeprecationWarning)
|
95 |
+
self._is_multipart = True
|
96 |
+
|
97 |
+
self._fields.append((type_options, headers, value))
|
98 |
+
|
99 |
+
def add_fields(self, *fields: Any) -> None:
|
100 |
+
to_add = list(fields)
|
101 |
+
|
102 |
+
while to_add:
|
103 |
+
rec = to_add.pop(0)
|
104 |
+
|
105 |
+
if isinstance(rec, io.IOBase):
|
106 |
+
k = guess_filename(rec, "unknown")
|
107 |
+
self.add_field(k, rec) # type: ignore[arg-type]
|
108 |
+
|
109 |
+
elif isinstance(rec, (MultiDictProxy, MultiDict)):
|
110 |
+
to_add.extend(rec.items())
|
111 |
+
|
112 |
+
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
|
113 |
+
k, fp = rec
|
114 |
+
self.add_field(k, fp) # type: ignore[arg-type]
|
115 |
+
|
116 |
+
else:
|
117 |
+
raise TypeError(
|
118 |
+
"Only io.IOBase, multidict and (name, file) "
|
119 |
+
"pairs allowed, use .add_field() for passing "
|
120 |
+
"more complex parameters, got {!r}".format(rec)
|
121 |
+
)
|
122 |
+
|
123 |
+
def _gen_form_urlencoded(self) -> payload.BytesPayload:
|
124 |
+
# form data (x-www-form-urlencoded)
|
125 |
+
data = []
|
126 |
+
for type_options, _, value in self._fields:
|
127 |
+
data.append((type_options["name"], value))
|
128 |
+
|
129 |
+
charset = self._charset if self._charset is not None else "utf-8"
|
130 |
+
|
131 |
+
if charset == "utf-8":
|
132 |
+
content_type = "application/x-www-form-urlencoded"
|
133 |
+
else:
|
134 |
+
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
|
135 |
+
|
136 |
+
return payload.BytesPayload(
|
137 |
+
urlencode(data, doseq=True, encoding=charset).encode(),
|
138 |
+
content_type=content_type,
|
139 |
+
)
|
140 |
+
|
141 |
+
def _gen_form_data(self) -> multipart.MultipartWriter:
|
142 |
+
"""Encode a list of fields using the multipart/form-data MIME format"""
|
143 |
+
if self._is_processed:
|
144 |
+
raise RuntimeError("Form data has been processed already")
|
145 |
+
for dispparams, headers, value in self._fields:
|
146 |
+
try:
|
147 |
+
if hdrs.CONTENT_TYPE in headers:
|
148 |
+
part = payload.get_payload(
|
149 |
+
value,
|
150 |
+
content_type=headers[hdrs.CONTENT_TYPE],
|
151 |
+
headers=headers,
|
152 |
+
encoding=self._charset,
|
153 |
+
)
|
154 |
+
else:
|
155 |
+
part = payload.get_payload(
|
156 |
+
value, headers=headers, encoding=self._charset
|
157 |
+
)
|
158 |
+
except Exception as exc:
|
159 |
+
raise TypeError(
|
160 |
+
"Can not serialize value type: %r\n "
|
161 |
+
"headers: %r\n value: %r" % (type(value), headers, value)
|
162 |
+
) from exc
|
163 |
+
|
164 |
+
if dispparams:
|
165 |
+
part.set_content_disposition(
|
166 |
+
"form-data", quote_fields=self._quote_fields, **dispparams
|
167 |
+
)
|
168 |
+
# FIXME cgi.FieldStorage doesn't likes body parts with
|
169 |
+
# Content-Length which were sent via chunked transfer encoding
|
170 |
+
assert part.headers is not None
|
171 |
+
part.headers.popall(hdrs.CONTENT_LENGTH, None)
|
172 |
+
|
173 |
+
self._writer.append_payload(part)
|
174 |
+
|
175 |
+
self._is_processed = True
|
176 |
+
return self._writer
|
177 |
+
|
178 |
+
def __call__(self) -> Payload:
|
179 |
+
if self._is_multipart:
|
180 |
+
return self._gen_form_data()
|
181 |
+
else:
|
182 |
+
return self._gen_form_urlencoded()
|
llmeval-env/lib/python3.10/site-packages/aiohttp/tracing.py
ADDED
@@ -0,0 +1,471 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from types import SimpleNamespace
|
2 |
+
from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar
|
3 |
+
|
4 |
+
import attr
|
5 |
+
from aiosignal import Signal
|
6 |
+
from multidict import CIMultiDict
|
7 |
+
from yarl import URL
|
8 |
+
|
9 |
+
from .client_reqrep import ClientResponse
|
10 |
+
|
11 |
+
if TYPE_CHECKING:
|
12 |
+
from .client import ClientSession
|
13 |
+
|
14 |
+
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
|
15 |
+
|
16 |
+
class _SignalCallback(Protocol[_ParamT_contra]):
|
17 |
+
def __call__(
|
18 |
+
self,
|
19 |
+
__client_session: ClientSession,
|
20 |
+
__trace_config_ctx: SimpleNamespace,
|
21 |
+
__params: _ParamT_contra,
|
22 |
+
) -> Awaitable[None]:
|
23 |
+
...
|
24 |
+
|
25 |
+
|
26 |
+
__all__ = (
|
27 |
+
"TraceConfig",
|
28 |
+
"TraceRequestStartParams",
|
29 |
+
"TraceRequestEndParams",
|
30 |
+
"TraceRequestExceptionParams",
|
31 |
+
"TraceConnectionQueuedStartParams",
|
32 |
+
"TraceConnectionQueuedEndParams",
|
33 |
+
"TraceConnectionCreateStartParams",
|
34 |
+
"TraceConnectionCreateEndParams",
|
35 |
+
"TraceConnectionReuseconnParams",
|
36 |
+
"TraceDnsResolveHostStartParams",
|
37 |
+
"TraceDnsResolveHostEndParams",
|
38 |
+
"TraceDnsCacheHitParams",
|
39 |
+
"TraceDnsCacheMissParams",
|
40 |
+
"TraceRequestRedirectParams",
|
41 |
+
"TraceRequestChunkSentParams",
|
42 |
+
"TraceResponseChunkReceivedParams",
|
43 |
+
"TraceRequestHeadersSentParams",
|
44 |
+
)
|
45 |
+
|
46 |
+
|
47 |
+
class TraceConfig:
|
48 |
+
"""First-class used to trace requests launched via ClientSession objects."""
|
49 |
+
|
50 |
+
def __init__(
|
51 |
+
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
|
52 |
+
) -> None:
|
53 |
+
self._on_request_start: Signal[
|
54 |
+
_SignalCallback[TraceRequestStartParams]
|
55 |
+
] = Signal(self)
|
56 |
+
self._on_request_chunk_sent: Signal[
|
57 |
+
_SignalCallback[TraceRequestChunkSentParams]
|
58 |
+
] = Signal(self)
|
59 |
+
self._on_response_chunk_received: Signal[
|
60 |
+
_SignalCallback[TraceResponseChunkReceivedParams]
|
61 |
+
] = Signal(self)
|
62 |
+
self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
|
63 |
+
self
|
64 |
+
)
|
65 |
+
self._on_request_exception: Signal[
|
66 |
+
_SignalCallback[TraceRequestExceptionParams]
|
67 |
+
] = Signal(self)
|
68 |
+
self._on_request_redirect: Signal[
|
69 |
+
_SignalCallback[TraceRequestRedirectParams]
|
70 |
+
] = Signal(self)
|
71 |
+
self._on_connection_queued_start: Signal[
|
72 |
+
_SignalCallback[TraceConnectionQueuedStartParams]
|
73 |
+
] = Signal(self)
|
74 |
+
self._on_connection_queued_end: Signal[
|
75 |
+
_SignalCallback[TraceConnectionQueuedEndParams]
|
76 |
+
] = Signal(self)
|
77 |
+
self._on_connection_create_start: Signal[
|
78 |
+
_SignalCallback[TraceConnectionCreateStartParams]
|
79 |
+
] = Signal(self)
|
80 |
+
self._on_connection_create_end: Signal[
|
81 |
+
_SignalCallback[TraceConnectionCreateEndParams]
|
82 |
+
] = Signal(self)
|
83 |
+
self._on_connection_reuseconn: Signal[
|
84 |
+
_SignalCallback[TraceConnectionReuseconnParams]
|
85 |
+
] = Signal(self)
|
86 |
+
self._on_dns_resolvehost_start: Signal[
|
87 |
+
_SignalCallback[TraceDnsResolveHostStartParams]
|
88 |
+
] = Signal(self)
|
89 |
+
self._on_dns_resolvehost_end: Signal[
|
90 |
+
_SignalCallback[TraceDnsResolveHostEndParams]
|
91 |
+
] = Signal(self)
|
92 |
+
self._on_dns_cache_hit: Signal[
|
93 |
+
_SignalCallback[TraceDnsCacheHitParams]
|
94 |
+
] = Signal(self)
|
95 |
+
self._on_dns_cache_miss: Signal[
|
96 |
+
_SignalCallback[TraceDnsCacheMissParams]
|
97 |
+
] = Signal(self)
|
98 |
+
self._on_request_headers_sent: Signal[
|
99 |
+
_SignalCallback[TraceRequestHeadersSentParams]
|
100 |
+
] = Signal(self)
|
101 |
+
|
102 |
+
self._trace_config_ctx_factory = trace_config_ctx_factory
|
103 |
+
|
104 |
+
def trace_config_ctx(
|
105 |
+
self, trace_request_ctx: Optional[SimpleNamespace] = None
|
106 |
+
) -> SimpleNamespace:
|
107 |
+
"""Return a new trace_config_ctx instance"""
|
108 |
+
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
|
109 |
+
|
110 |
+
def freeze(self) -> None:
|
111 |
+
self._on_request_start.freeze()
|
112 |
+
self._on_request_chunk_sent.freeze()
|
113 |
+
self._on_response_chunk_received.freeze()
|
114 |
+
self._on_request_end.freeze()
|
115 |
+
self._on_request_exception.freeze()
|
116 |
+
self._on_request_redirect.freeze()
|
117 |
+
self._on_connection_queued_start.freeze()
|
118 |
+
self._on_connection_queued_end.freeze()
|
119 |
+
self._on_connection_create_start.freeze()
|
120 |
+
self._on_connection_create_end.freeze()
|
121 |
+
self._on_connection_reuseconn.freeze()
|
122 |
+
self._on_dns_resolvehost_start.freeze()
|
123 |
+
self._on_dns_resolvehost_end.freeze()
|
124 |
+
self._on_dns_cache_hit.freeze()
|
125 |
+
self._on_dns_cache_miss.freeze()
|
126 |
+
self._on_request_headers_sent.freeze()
|
127 |
+
|
128 |
+
@property
|
129 |
+
def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
|
130 |
+
return self._on_request_start
|
131 |
+
|
132 |
+
@property
|
133 |
+
def on_request_chunk_sent(
|
134 |
+
self,
|
135 |
+
) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
|
136 |
+
return self._on_request_chunk_sent
|
137 |
+
|
138 |
+
@property
|
139 |
+
def on_response_chunk_received(
|
140 |
+
self,
|
141 |
+
) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
|
142 |
+
return self._on_response_chunk_received
|
143 |
+
|
144 |
+
@property
|
145 |
+
def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
|
146 |
+
return self._on_request_end
|
147 |
+
|
148 |
+
@property
|
149 |
+
def on_request_exception(
|
150 |
+
self,
|
151 |
+
) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
|
152 |
+
return self._on_request_exception
|
153 |
+
|
154 |
+
@property
|
155 |
+
def on_request_redirect(
|
156 |
+
self,
|
157 |
+
) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
|
158 |
+
return self._on_request_redirect
|
159 |
+
|
160 |
+
@property
|
161 |
+
def on_connection_queued_start(
|
162 |
+
self,
|
163 |
+
) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
|
164 |
+
return self._on_connection_queued_start
|
165 |
+
|
166 |
+
@property
|
167 |
+
def on_connection_queued_end(
|
168 |
+
self,
|
169 |
+
) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
|
170 |
+
return self._on_connection_queued_end
|
171 |
+
|
172 |
+
@property
|
173 |
+
def on_connection_create_start(
|
174 |
+
self,
|
175 |
+
) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
|
176 |
+
return self._on_connection_create_start
|
177 |
+
|
178 |
+
@property
|
179 |
+
def on_connection_create_end(
|
180 |
+
self,
|
181 |
+
) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
|
182 |
+
return self._on_connection_create_end
|
183 |
+
|
184 |
+
@property
|
185 |
+
def on_connection_reuseconn(
|
186 |
+
self,
|
187 |
+
) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
|
188 |
+
return self._on_connection_reuseconn
|
189 |
+
|
190 |
+
@property
|
191 |
+
def on_dns_resolvehost_start(
|
192 |
+
self,
|
193 |
+
) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
|
194 |
+
return self._on_dns_resolvehost_start
|
195 |
+
|
196 |
+
@property
|
197 |
+
def on_dns_resolvehost_end(
|
198 |
+
self,
|
199 |
+
) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
|
200 |
+
return self._on_dns_resolvehost_end
|
201 |
+
|
202 |
+
@property
|
203 |
+
def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
|
204 |
+
return self._on_dns_cache_hit
|
205 |
+
|
206 |
+
@property
|
207 |
+
def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
|
208 |
+
return self._on_dns_cache_miss
|
209 |
+
|
210 |
+
@property
|
211 |
+
def on_request_headers_sent(
|
212 |
+
self,
|
213 |
+
) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
|
214 |
+
return self._on_request_headers_sent
|
215 |
+
|
216 |
+
|
217 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
218 |
+
class TraceRequestStartParams:
|
219 |
+
"""Parameters sent by the `on_request_start` signal"""
|
220 |
+
|
221 |
+
method: str
|
222 |
+
url: URL
|
223 |
+
headers: "CIMultiDict[str]"
|
224 |
+
|
225 |
+
|
226 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
227 |
+
class TraceRequestChunkSentParams:
|
228 |
+
"""Parameters sent by the `on_request_chunk_sent` signal"""
|
229 |
+
|
230 |
+
method: str
|
231 |
+
url: URL
|
232 |
+
chunk: bytes
|
233 |
+
|
234 |
+
|
235 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
236 |
+
class TraceResponseChunkReceivedParams:
|
237 |
+
"""Parameters sent by the `on_response_chunk_received` signal"""
|
238 |
+
|
239 |
+
method: str
|
240 |
+
url: URL
|
241 |
+
chunk: bytes
|
242 |
+
|
243 |
+
|
244 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
245 |
+
class TraceRequestEndParams:
|
246 |
+
"""Parameters sent by the `on_request_end` signal"""
|
247 |
+
|
248 |
+
method: str
|
249 |
+
url: URL
|
250 |
+
headers: "CIMultiDict[str]"
|
251 |
+
response: ClientResponse
|
252 |
+
|
253 |
+
|
254 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
255 |
+
class TraceRequestExceptionParams:
|
256 |
+
"""Parameters sent by the `on_request_exception` signal"""
|
257 |
+
|
258 |
+
method: str
|
259 |
+
url: URL
|
260 |
+
headers: "CIMultiDict[str]"
|
261 |
+
exception: BaseException
|
262 |
+
|
263 |
+
|
264 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
265 |
+
class TraceRequestRedirectParams:
|
266 |
+
"""Parameters sent by the `on_request_redirect` signal"""
|
267 |
+
|
268 |
+
method: str
|
269 |
+
url: URL
|
270 |
+
headers: "CIMultiDict[str]"
|
271 |
+
response: ClientResponse
|
272 |
+
|
273 |
+
|
274 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
275 |
+
class TraceConnectionQueuedStartParams:
|
276 |
+
"""Parameters sent by the `on_connection_queued_start` signal"""
|
277 |
+
|
278 |
+
|
279 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
280 |
+
class TraceConnectionQueuedEndParams:
|
281 |
+
"""Parameters sent by the `on_connection_queued_end` signal"""
|
282 |
+
|
283 |
+
|
284 |
+
# Empty marker payload: the signal itself carries all the information.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
    """Parameters sent by the `on_connection_create_start` signal"""
|
287 |
+
|
288 |
+
|
289 |
+
# Empty marker payload: the signal itself carries all the information.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
    """Parameters sent by the `on_connection_create_end` signal"""
|
292 |
+
|
293 |
+
|
294 |
+
# Empty marker payload: the signal itself carries all the information.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
    """Parameters sent by the `on_connection_reuseconn` signal"""
|
297 |
+
|
298 |
+
|
299 |
+
# Immutable value object delivered to `on_dns_resolvehost_start` listeners.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
    """Parameters sent by the `on_dns_resolvehost_start` signal"""

    host: str  # hostname about to be resolved
|
304 |
+
|
305 |
+
|
306 |
+
# Immutable value object delivered to `on_dns_resolvehost_end` listeners.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
    """Parameters sent by the `on_dns_resolvehost_end` signal"""

    host: str  # hostname whose resolution finished
|
311 |
+
|
312 |
+
|
313 |
+
# Immutable value object delivered to `on_dns_cache_hit` listeners.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
    """Parameters sent by the `on_dns_cache_hit` signal"""

    host: str  # hostname found in the DNS cache
|
318 |
+
|
319 |
+
|
320 |
+
# Immutable value object delivered to `on_dns_cache_miss` listeners.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
    """Parameters sent by the `on_dns_cache_miss` signal"""

    host: str  # hostname absent from the DNS cache
|
325 |
+
|
326 |
+
|
327 |
+
# Immutable value object delivered to `on_request_headers_sent` listeners.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
    """Parameters sent by the `on_request_headers_sent` signal"""

    method: str  # HTTP method of the request
    url: URL  # request URL
    headers: "CIMultiDict[str]"  # headers that were written out
|
334 |
+
|
335 |
+
|
336 |
+
class Trace:
    """Internal dependency holder class.

    Used to keep together the main dependencies used
    at the moment of send a signal.

    Each ``send_*`` coroutine builds the matching frozen params object and
    forwards it to the corresponding ``TraceConfig`` signal, always passing
    the owning session and the per-request trace context first.
    """

    def __init__(
        self,
        session: "ClientSession",
        trace_config: TraceConfig,
        trace_config_ctx: SimpleNamespace,
    ) -> None:
        self._trace_config = trace_config
        self._trace_config_ctx = trace_config_ctx
        self._session = session

    async def send_request_start(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        """Notify listeners that a request is about to start."""
        return await self._trace_config.on_request_start.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestStartParams(method, url, headers),
        )

    async def send_request_chunk_sent(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        """Notify listeners that a request body chunk was sent."""
        return await self._trace_config.on_request_chunk_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestChunkSentParams(method, url, chunk),
        )

    async def send_response_chunk_received(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        """Notify listeners that a response body chunk was received."""
        return await self._trace_config.on_response_chunk_received.send(
            self._session,
            self._trace_config_ctx,
            TraceResponseChunkReceivedParams(method, url, chunk),
        )

    async def send_request_end(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        """Notify listeners that a request finished successfully."""
        return await self._trace_config.on_request_end.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestEndParams(method, url, headers, response),
        )

    async def send_request_exception(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        exception: BaseException,
    ) -> None:
        """Notify listeners that a request failed with *exception*."""
        return await self._trace_config.on_request_exception.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestExceptionParams(method, url, headers, exception),
        )

    async def send_request_redirect(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        """Notify listeners that a redirect response was received."""
        # Consistency fix: use the public ``on_request_redirect`` accessor
        # like every sibling method (was the private ``_on_request_redirect``).
        return await self._trace_config.on_request_redirect.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestRedirectParams(method, url, headers, response),
        )

    async def send_connection_queued_start(self) -> None:
        """Notify listeners that the request is waiting for a connection."""
        return await self._trace_config.on_connection_queued_start.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
        )

    async def send_connection_queued_end(self) -> None:
        """Notify listeners that the wait for a connection ended."""
        return await self._trace_config.on_connection_queued_end.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
        )

    async def send_connection_create_start(self) -> None:
        """Notify listeners that a new connection is being created."""
        return await self._trace_config.on_connection_create_start.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
        )

    async def send_connection_create_end(self) -> None:
        """Notify listeners that the new connection is established."""
        return await self._trace_config.on_connection_create_end.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
        )

    async def send_connection_reuseconn(self) -> None:
        """Notify listeners that a pooled connection is being reused."""
        return await self._trace_config.on_connection_reuseconn.send(
            self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
        )

    async def send_dns_resolvehost_start(self, host: str) -> None:
        """Notify listeners that DNS resolution for *host* started."""
        return await self._trace_config.on_dns_resolvehost_start.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
        )

    async def send_dns_resolvehost_end(self, host: str) -> None:
        """Notify listeners that DNS resolution for *host* finished."""
        return await self._trace_config.on_dns_resolvehost_end.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
        )

    async def send_dns_cache_hit(self, host: str) -> None:
        """Notify listeners that *host* was served from the DNS cache."""
        return await self._trace_config.on_dns_cache_hit.send(
            self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
        )

    async def send_dns_cache_miss(self, host: str) -> None:
        """Notify listeners that *host* was missing from the DNS cache."""
        return await self._trace_config.on_dns_cache_miss.send(
            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
        )

    async def send_request_headers(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        """Notify listeners that the request headers were sent."""
        # Consistency fix: use the public ``on_request_headers_sent`` accessor
        # like every sibling method (was ``_on_request_headers_sent``).
        return await self._trace_config.on_request_headers_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestHeadersSentParams(method, url, headers),
        )
|
llmeval-env/lib/python3.10/site-packages/aiohttp/web_middlewares.py
ADDED
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import re
|
2 |
+
from typing import TYPE_CHECKING, Tuple, Type, TypeVar
|
3 |
+
|
4 |
+
from .typedefs import Handler, Middleware
|
5 |
+
from .web_exceptions import HTTPMove, HTTPPermanentRedirect
|
6 |
+
from .web_request import Request
|
7 |
+
from .web_response import StreamResponse
|
8 |
+
from .web_urldispatcher import SystemRoute
|
9 |
+
|
10 |
+
__all__ = (
|
11 |
+
"middleware",
|
12 |
+
"normalize_path_middleware",
|
13 |
+
)
|
14 |
+
|
15 |
+
if TYPE_CHECKING:
|
16 |
+
from .web_app import Application
|
17 |
+
|
18 |
+
_Func = TypeVar("_Func")
|
19 |
+
|
20 |
+
|
21 |
+
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Try resolving a clone of *request* against *path*.

    Returns ``(True, clone)`` when *path* resolves without an HTTP error,
    otherwise ``(False, original_request)``.
    """
    candidate = request.clone(rel_url=path)

    match_info = await request.app.router.resolve(candidate)
    candidate._match_info = match_info

    # Inverted guard: bail out with the original request on resolution failure.
    if match_info.http_exception is not None:
        return False, request
    return True, candidate
|
31 |
+
|
32 |
+
|
33 |
+
def middleware(f: _Func) -> _Func:
    """Mark *f* as a new-style (version 1) middleware and return it unchanged."""
    # setattr avoids the attr-defined type-ignore a plain assignment needs.
    setattr(f, "__middleware_version__", 1)
    return f
|
36 |
+
|
37 |
+
|
38 |
+
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
      - Add or remove a trailing slash to the path.
      - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order if both merge and append/remove are enabled is
        1) merge slashes
        2) append/remove slash
        3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only act when routing failed (SystemRoute covers 404/405 handlers);
        # successfully routed requests pass straight through.
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split off the query string so only the path part is rewritten.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            # Candidate paths, tried in the documented order: merged slashes,
            # appended/removed trailing slash, then both combined.
            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading double slashes unconditionally so the
                # redirect target cannot become a scheme-relative URL.
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    # Redirect to the first candidate that routes cleanly.
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
|
108 |
+
|
109 |
+
|
110 |
+
def _fix_request_current_app(app: "Application") -> Middleware:
    """Return a middleware that pins ``request.match_info``'s current app
    to *app* for the duration of the handler call."""

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        with request.match_info.set_current_app(app):
            response = await handler(request)
        return response

    return impl
|
llmeval-env/lib/python3.10/site-packages/aiohttp/web_response.py
ADDED
@@ -0,0 +1,819 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import collections.abc
|
3 |
+
import datetime
|
4 |
+
import enum
|
5 |
+
import json
|
6 |
+
import math
|
7 |
+
import time
|
8 |
+
import warnings
|
9 |
+
from concurrent.futures import Executor
|
10 |
+
from http import HTTPStatus
|
11 |
+
from http.cookies import SimpleCookie
|
12 |
+
from typing import (
|
13 |
+
TYPE_CHECKING,
|
14 |
+
Any,
|
15 |
+
Dict,
|
16 |
+
Iterator,
|
17 |
+
MutableMapping,
|
18 |
+
Optional,
|
19 |
+
Union,
|
20 |
+
cast,
|
21 |
+
)
|
22 |
+
|
23 |
+
from multidict import CIMultiDict, istr
|
24 |
+
|
25 |
+
from . import hdrs, payload
|
26 |
+
from .abc import AbstractStreamWriter
|
27 |
+
from .compression_utils import ZLibCompressor
|
28 |
+
from .helpers import (
|
29 |
+
ETAG_ANY,
|
30 |
+
QUOTED_ETAG_RE,
|
31 |
+
ETag,
|
32 |
+
HeadersMixin,
|
33 |
+
must_be_empty_body,
|
34 |
+
parse_http_date,
|
35 |
+
rfc822_formatted_time,
|
36 |
+
sentinel,
|
37 |
+
should_remove_content_length,
|
38 |
+
validate_etag_value,
|
39 |
+
)
|
40 |
+
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
|
41 |
+
from .payload import Payload
|
42 |
+
from .typedefs import JSONEncoder, LooseHeaders
|
43 |
+
|
44 |
+
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
|
45 |
+
|
46 |
+
|
47 |
+
if TYPE_CHECKING:
    from .web_request import BaseRequest

    # For type checkers: the response behaves as a str-keyed mutable mapping.
    BaseClass = MutableMapping[str, Any]
else:
    # At runtime use the plain ABC (no generic subscription needed).
    BaseClass = collections.abc.MutableMapping
|
53 |
+
|
54 |
+
|
55 |
+
class ContentCoding(enum.Enum):
    """Content codings supported for response compression."""

    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"  # no transformation
|
63 |
+
|
64 |
+
|
65 |
+
############################################################
|
66 |
+
# HTTP Response classes
|
67 |
+
############################################################
|
68 |
+
|
69 |
+
|
70 |
+
class StreamResponse(BaseClass, HeadersMixin):
    """HTTP response with an explicitly streamed body.

    NOTE(review): the MutableMapping base presumably exposes the per-response
    ``self._state`` dict set in ``__init__`` — the mapping methods are defined
    elsewhere; confirm before relying on this.
    """

    # When True, _prepare_headers derives Transfer-Encoding/keep-alive from
    # ``self.content_length``; presumably overridden by subclasses — confirm.
    _length_check = True
|
73 |
+
|
74 |
+
    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Initialize the response with *status*, optional *reason* phrase
        and optional initial *headers*."""
        self._body = None
        self._keep_alive: Optional[bool] = None  # None = decide from request
        self._chunked = False  # chunked transfer encoding requested
        self._compression = False  # compression requested
        self._compression_force: Optional[ContentCoding] = None  # forced coding
        self._cookies = SimpleCookie()

        self._req: Optional[BaseRequest] = None  # set by _start()
        self._payload_writer: Optional[AbstractStreamWriter] = None
        self._eof_sent = False  # True once write_eof() completed
        self._must_be_empty_body: Optional[bool] = None  # computed in prepare()
        self._body_length = 0  # bytes written, filled in by write_eof()
        self._state: Dict[str, Any] = {}  # per-response mapping storage

        if headers is not None:
            self._headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            self._headers = CIMultiDict()

        # Populates _status/_reason (asserts not prepared, which is true here).
        self.set_status(status, reason)
|
101 |
+
|
102 |
+
    @property
    def prepared(self) -> bool:
        """True once prepare() attached a payload writer (headers sent)."""
        return self._payload_writer is not None
|
105 |
+
|
106 |
+
@property
|
107 |
+
def task(self) -> "Optional[asyncio.Task[None]]":
|
108 |
+
if self._req:
|
109 |
+
return self._req.task
|
110 |
+
else:
|
111 |
+
return None
|
112 |
+
|
113 |
+
    @property
    def status(self) -> int:
        """HTTP status code (see set_status())."""
        return self._status
|
116 |
+
|
117 |
+
    @property
    def chunked(self) -> bool:
        """True if chunked transfer encoding was enabled."""
        return self._chunked
|
120 |
+
|
121 |
+
    @property
    def compression(self) -> bool:
        """True if response compression was enabled."""
        return self._compression
|
124 |
+
|
125 |
+
    @property
    def reason(self) -> str:
        """HTTP reason phrase (see set_status())."""
        return self._reason
|
128 |
+
|
129 |
+
def set_status(
|
130 |
+
self,
|
131 |
+
status: int,
|
132 |
+
reason: Optional[str] = None,
|
133 |
+
) -> None:
|
134 |
+
assert not self.prepared, (
|
135 |
+
"Cannot change the response status code after " "the headers have been sent"
|
136 |
+
)
|
137 |
+
self._status = int(status)
|
138 |
+
if reason is None:
|
139 |
+
try:
|
140 |
+
reason = HTTPStatus(self._status).phrase
|
141 |
+
except ValueError:
|
142 |
+
reason = ""
|
143 |
+
self._reason = reason
|
144 |
+
|
145 |
+
    @property
    def keep_alive(self) -> Optional[bool]:
        """Keep-alive flag; None means "not decided yet"."""
        return self._keep_alive
|
148 |
+
|
149 |
+
    def force_close(self) -> None:
        """Disable keep-alive: close the connection after this response."""
        self._keep_alive = False
|
151 |
+
|
152 |
+
    @property
    def body_length(self) -> int:
        """Number of body bytes written (filled in by write_eof())."""
        return self._body_length
|
155 |
+
|
156 |
+
    @property
    def output_length(self) -> int:
        """Deprecated: size of the payload writer's buffer."""
        warnings.warn("output_length is deprecated", DeprecationWarning)
        # Only valid between prepare() and write_eof().
        assert self._payload_writer
        return self._payload_writer.buffer_size
|
161 |
+
|
162 |
+
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
|
163 |
+
"""Enables automatic chunked transfer encoding."""
|
164 |
+
self._chunked = True
|
165 |
+
|
166 |
+
if hdrs.CONTENT_LENGTH in self._headers:
|
167 |
+
raise RuntimeError(
|
168 |
+
"You can't enable chunked encoding when " "a content length is set"
|
169 |
+
)
|
170 |
+
if chunk_size is not None:
|
171 |
+
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
|
172 |
+
|
173 |
+
def enable_compression(
|
174 |
+
self, force: Optional[Union[bool, ContentCoding]] = None
|
175 |
+
) -> None:
|
176 |
+
"""Enables response compression encoding."""
|
177 |
+
# Backwards compatibility for when force was a bool <0.17.
|
178 |
+
if type(force) == bool:
|
179 |
+
force = ContentCoding.deflate if force else ContentCoding.identity
|
180 |
+
warnings.warn(
|
181 |
+
"Using boolean for force is deprecated #3318", DeprecationWarning
|
182 |
+
)
|
183 |
+
elif force is not None:
|
184 |
+
assert isinstance(force, ContentCoding), (
|
185 |
+
"force should one of " "None, bool or " "ContentEncoding"
|
186 |
+
)
|
187 |
+
|
188 |
+
self._compression = True
|
189 |
+
self._compression_force = force
|
190 |
+
|
191 |
+
    @property
    def headers(self) -> "CIMultiDict[str]":
        """Mutable, case-insensitive response headers."""
        return self._headers
|
194 |
+
|
195 |
+
    @property
    def cookies(self) -> SimpleCookie:
        """Response cookies, rendered as Set-Cookie headers on prepare()."""
        return self._cookies
|
198 |
+
|
199 |
+
    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == "":
            # deleted cookie
            # (an empty coded_value marks a del_cookie() tombstone; drop it so
            # the new morsel starts clean instead of inheriting its attributes)
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            # Clear the epoch expiry left over from a prior del_cookie().
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            # Remove a stale max-age=0 set by a prior del_cookie().
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite
|
249 |
+
|
250 |
+
    def del_cookie(
        self, name: str, *, domain: Optional[str] = None, path: str = "/"
    ) -> None:
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        # Drop any pending Set-Cookie for this name, then emit an empty
        # value that is already expired so the client discards it.
        self._cookies.pop(name, None)
        self.set_cookie(
            name,
            "",
            max_age=0,
            expires="Thu, 01 Jan 1970 00:00:00 GMT",
            domain=domain,
            path=path,
        )
|
267 |
+
|
268 |
+
@property
|
269 |
+
def content_length(self) -> Optional[int]:
|
270 |
+
# Just a placeholder for adding setter
|
271 |
+
return super().content_length
|
272 |
+
|
273 |
+
@content_length.setter
|
274 |
+
def content_length(self, value: Optional[int]) -> None:
|
275 |
+
if value is not None:
|
276 |
+
value = int(value)
|
277 |
+
if self._chunked:
|
278 |
+
raise RuntimeError(
|
279 |
+
"You can't set content length when " "chunked encoding is enable"
|
280 |
+
)
|
281 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(value)
|
282 |
+
else:
|
283 |
+
self._headers.pop(hdrs.CONTENT_LENGTH, None)
|
284 |
+
|
285 |
+
    @property
    def content_type(self) -> str:
        """Content-Type media type without parameters (from HeadersMixin)."""
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value: str) -> None:
        self.content_type  # read header values if needed
        self._content_type = str(value)
        # Rebuild the Content-Type header from type + cached parameters.
        self._generate_content_type_header()
|
295 |
+
|
296 |
+
    @property
    def charset(self) -> Optional[str]:
        """charset parameter of Content-Type, or None (from HeadersMixin)."""
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            self._content_dict.pop("charset", None)
        else:
            # charset tokens are case-insensitive; normalize to lowercase.
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()
|
315 |
+
|
316 |
+
@property
|
317 |
+
def last_modified(self) -> Optional[datetime.datetime]:
|
318 |
+
"""The value of Last-Modified HTTP header, or None.
|
319 |
+
|
320 |
+
This header is represented as a `datetime` object.
|
321 |
+
"""
|
322 |
+
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
|
323 |
+
|
324 |
+
@last_modified.setter
|
325 |
+
def last_modified(
|
326 |
+
self, value: Optional[Union[int, float, datetime.datetime, str]]
|
327 |
+
) -> None:
|
328 |
+
if value is None:
|
329 |
+
self._headers.pop(hdrs.LAST_MODIFIED, None)
|
330 |
+
elif isinstance(value, (int, float)):
|
331 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
332 |
+
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
|
333 |
+
)
|
334 |
+
elif isinstance(value, datetime.datetime):
|
335 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
336 |
+
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
|
337 |
+
)
|
338 |
+
elif isinstance(value, str):
|
339 |
+
self._headers[hdrs.LAST_MODIFIED] = value
|
340 |
+
|
341 |
+
@property
|
342 |
+
def etag(self) -> Optional[ETag]:
|
343 |
+
quoted_value = self._headers.get(hdrs.ETAG)
|
344 |
+
if not quoted_value:
|
345 |
+
return None
|
346 |
+
elif quoted_value == ETAG_ANY:
|
347 |
+
return ETag(value=ETAG_ANY)
|
348 |
+
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
|
349 |
+
if not match:
|
350 |
+
return None
|
351 |
+
is_weak, value = match.group(1, 2)
|
352 |
+
return ETag(
|
353 |
+
is_weak=bool(is_weak),
|
354 |
+
value=value,
|
355 |
+
)
|
356 |
+
|
357 |
+
@etag.setter
|
358 |
+
def etag(self, value: Optional[Union[ETag, str]]) -> None:
|
359 |
+
if value is None:
|
360 |
+
self._headers.pop(hdrs.ETAG, None)
|
361 |
+
elif (isinstance(value, str) and value == ETAG_ANY) or (
|
362 |
+
isinstance(value, ETag) and value.value == ETAG_ANY
|
363 |
+
):
|
364 |
+
self._headers[hdrs.ETAG] = ETAG_ANY
|
365 |
+
elif isinstance(value, str):
|
366 |
+
validate_etag_value(value)
|
367 |
+
self._headers[hdrs.ETAG] = f'"{value}"'
|
368 |
+
elif isinstance(value, ETag) and isinstance(value.value, str):
|
369 |
+
validate_etag_value(value.value)
|
370 |
+
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
|
371 |
+
self._headers[hdrs.ETAG] = hdr_value
|
372 |
+
else:
|
373 |
+
raise ValueError(
|
374 |
+
f"Unsupported etag type: {type(value)}. "
|
375 |
+
f"etag must be str, ETag or None"
|
376 |
+
)
|
377 |
+
|
378 |
+
def _generate_content_type_header(
|
379 |
+
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
|
380 |
+
) -> None:
|
381 |
+
assert self._content_dict is not None
|
382 |
+
assert self._content_type is not None
|
383 |
+
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
|
384 |
+
if params:
|
385 |
+
ctype = self._content_type + "; " + params
|
386 |
+
else:
|
387 |
+
ctype = self._content_type
|
388 |
+
self._headers[CONTENT_TYPE] = ctype
|
389 |
+
|
390 |
+
    async def _do_start_compression(self, coding: ContentCoding) -> None:
        """Activate *coding* on the payload writer and fix up headers."""
        if coding != ContentCoding.identity:
            assert self._payload_writer is not None
            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._payload_writer.enable_compression(coding.value)
            # Compressed payload may have different content length,
            # remove the header
            self._headers.popall(hdrs.CONTENT_LENGTH, None)
|
398 |
+
|
399 |
+
    async def _start_compression(self, request: "BaseRequest") -> None:
        """Pick a content coding (forced, or negotiated from Accept-Encoding)
        and start compressing."""
        if self._compression_force:
            await self._do_start_compression(self._compression_force)
        else:
            # Encoding comparisons should be case-insensitive
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
            accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
            # First supported coding mentioned by the client wins
            # (iteration order of the ContentCoding enum).
            for coding in ContentCoding:
                if coding.value in accept_encoding:
                    await self._do_start_compression(coding)
                    return
|
410 |
+
|
411 |
+
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
|
412 |
+
if self._eof_sent:
|
413 |
+
return None
|
414 |
+
if self._payload_writer is not None:
|
415 |
+
return self._payload_writer
|
416 |
+
self._must_be_empty_body = must_be_empty_body(request.method, self.status)
|
417 |
+
return await self._start(request)
|
418 |
+
|
419 |
+
    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        """Attach to *request*'s writer, run prepare hooks, write headers."""
        self._req = request
        writer = self._payload_writer = request._payload_writer

        # Order matters: headers are finalized, then the request's prepare
        # hook may observe/adjust the response, then headers go on the wire.
        await self._prepare_headers()
        await request._prepare_hook(self)
        await self._write_headers()

        return writer
|
428 |
+
|
429 |
+
    async def _prepare_headers(self) -> None:
        """Finalize all response headers before they are written.

        Handles cookies, compression, chunked/Content-Length framing,
        empty-body fixups and the Connection header.
        """
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # Inherit keep-alive from the request unless explicitly set.
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        # Render each cookie morsel as its own Set-Cookie header
        # (output() is "Set-Cookie: ..." with header=""; [1:] strips the space).
        for cookie in self._cookies.values():
            value = cookie.output(header="")[1:]
            headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            # Chunked framing only exists in HTTP/1.1.
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
                if hdrs.CONTENT_LENGTH in headers:
                    # Transfer-Encoding and Content-Length are exclusive.
                    del headers[hdrs.CONTENT_LENGTH]
        elif self._length_check:
            writer.length = self.content_length
            if writer.length is None:
                # Unknown length: chunk on 1.1, else close the connection
                # so the client can detect end-of-body.
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        else:
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        # Only emit it where it differs from the protocol default:
        # keep-alive is the 1.1 default, close is the 1.0 default.
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            else:
                if version == HttpVersion11:
                    headers[hdrs.CONNECTION] = "close"
|
494 |
+
|
495 |
+
async def _write_headers(self) -> None:
|
496 |
+
request = self._req
|
497 |
+
assert request is not None
|
498 |
+
writer = self._payload_writer
|
499 |
+
assert writer is not None
|
500 |
+
# status line
|
501 |
+
version = request.version
|
502 |
+
status_line = "HTTP/{}.{} {} {}".format(
|
503 |
+
version[0], version[1], self._status, self._reason
|
504 |
+
)
|
505 |
+
await writer.write_headers(status_line, self._headers)
|
506 |
+
|
507 |
+
async def write(self, data: bytes) -> None:
|
508 |
+
assert isinstance(
|
509 |
+
data, (bytes, bytearray, memoryview)
|
510 |
+
), "data argument must be byte-ish (%r)" % type(data)
|
511 |
+
|
512 |
+
if self._eof_sent:
|
513 |
+
raise RuntimeError("Cannot call write() after write_eof()")
|
514 |
+
if self._payload_writer is None:
|
515 |
+
raise RuntimeError("Cannot call write() before prepare()")
|
516 |
+
|
517 |
+
await self._payload_writer.write(data)
|
518 |
+
|
519 |
+
async def drain(self) -> None:
|
520 |
+
assert not self._eof_sent, "EOF has already been sent"
|
521 |
+
assert self._payload_writer is not None, "Response has not been started"
|
522 |
+
warnings.warn(
|
523 |
+
"drain method is deprecated, use await resp.write()",
|
524 |
+
DeprecationWarning,
|
525 |
+
stacklevel=2,
|
526 |
+
)
|
527 |
+
await self._payload_writer.drain()
|
528 |
+
|
529 |
+
async def write_eof(self, data: bytes = b"") -> None:
|
530 |
+
assert isinstance(
|
531 |
+
data, (bytes, bytearray, memoryview)
|
532 |
+
), "data argument must be byte-ish (%r)" % type(data)
|
533 |
+
|
534 |
+
if self._eof_sent:
|
535 |
+
return
|
536 |
+
|
537 |
+
assert self._payload_writer is not None, "Response has not been started"
|
538 |
+
|
539 |
+
await self._payload_writer.write_eof(data)
|
540 |
+
self._eof_sent = True
|
541 |
+
self._req = None
|
542 |
+
self._body_length = self._payload_writer.output_size
|
543 |
+
self._payload_writer = None
|
544 |
+
|
545 |
+
def __repr__(self) -> str:
|
546 |
+
if self._eof_sent:
|
547 |
+
info = "eof"
|
548 |
+
elif self.prepared:
|
549 |
+
assert self._req is not None
|
550 |
+
info = f"{self._req.method} {self._req.path} "
|
551 |
+
else:
|
552 |
+
info = "not prepared"
|
553 |
+
return f"<{self.__class__.__name__} {self.reason} {info}>"
|
554 |
+
|
555 |
+
def __getitem__(self, key: str) -> Any:
|
556 |
+
return self._state[key]
|
557 |
+
|
558 |
+
def __setitem__(self, key: str, value: Any) -> None:
|
559 |
+
self._state[key] = value
|
560 |
+
|
561 |
+
def __delitem__(self, key: str) -> None:
|
562 |
+
del self._state[key]
|
563 |
+
|
564 |
+
def __len__(self) -> int:
|
565 |
+
return len(self._state)
|
566 |
+
|
567 |
+
def __iter__(self) -> Iterator[str]:
|
568 |
+
return iter(self._state)
|
569 |
+
|
570 |
+
def __hash__(self) -> int:
|
571 |
+
return hash(id(self))
|
572 |
+
|
573 |
+
def __eq__(self, other: object) -> bool:
|
574 |
+
return self is other
|
575 |
+
|
576 |
+
|
577 |
+
class Response(StreamResponse):
    """A fully buffered HTTP response whose body is held in memory.

    The body may be supplied as raw ``bytes``, as ``text`` (encoded with the
    response charset), or as any object adaptable through
    ``payload.PAYLOAD_REGISTRY``.
    """

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        # body and text are mutually exclusive representations of the payload.
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        # Normalize headers into a case-insensitive multidict.
        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        elif not isinstance(headers, CIMultiDict):
            real_headers = CIMultiDict(headers)
        else:
            real_headers = headers  # = cast('CIMultiDict[str]', headers)

        # The charset must be given via the dedicated parameter, never baked
        # into the content_type string.
        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type " "argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                # An explicit Content-Type header wins; mixing it with the
                # content_type/charset params would be ambiguous.
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                # Encode eagerly so the text/body distinction collapses to bytes.
                body = text.encode(charset)
                text = None
        else:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type is not None or charset is not None:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                if content_type is not None:
                    if charset is not None:
                        content_type += "; charset=" + charset
                    real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, headers=real_headers)

        # At this point at most one of text/body is still set; route it
        # through the corresponding property so headers are kept in sync.
        if text is not None:
            self.text = text
        else:
            self.body = body

        # Cached zlib-compressed body, populated by _do_start_compression.
        self._compressed_body: Optional[bytes] = None
        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        """The response body as bytes or a Payload (None when unset)."""
        return self._body

    @body.setter
    def body(self, body: bytes) -> None:
        if body is None:
            self._body: Optional[bytes] = None
            self._body_payload: bool = False
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
            self._body_payload = False
        else:
            # Anything else must be adaptable into a registered Payload.
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            self._body_payload = True

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for (key, value) in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        # Any previously compressed representation is now stale.
        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        """The body decoded with the response charset (utf-8 fallback)."""
        if self._body is None:
            return None
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        # Setting text upgrades a default binary content type to text/plain.
        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._body_payload = False
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        # Chunked responses have no fixed length by definition.
        if self._chunked:
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return super().content_length

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif self._body_payload:
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Buffered responses derive their length from the body itself.
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        """Write the (possibly compressed) buffered body and finish."""
        if self._eof_sent:
            return
        # Prefer the compressed representation when one was prepared.
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is not None:
            if self._must_be_empty_body:
                # e.g. HEAD or 1xx/204/304: headers only, body suppressed.
                await super().write_eof()
            elif self._body_payload:
                payload = cast(Payload, body)
                await payload.write(self._payload_writer)
                await super().write_eof()
            else:
                await super().write_eof(cast(bytes, body))
        else:
            await super().write_eof()

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        # Fill in (or strip) Content-Length before the headers are sent.
        if should_remove_content_length(request.method, self.status):
            if hdrs.CONTENT_LENGTH in self._headers:
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
            if self._body_payload:
                size = cast(Payload, self._body).size
                if size is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method.upper() != hdrs.METH_HEAD
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        # Streaming payloads and chunked responses use on-the-fly
        # compression from the base class.
        if self._body_payload or self._chunked:
            return await super()._do_start_compression(coding)

        if coding != ContentCoding.identity:
            # Instead of using _payload_writer.enable_compression,
            # compress the whole body
            compressor = ZLibCompressor(
                encoding=str(coding.value),
                max_sync_chunk_size=self._zlib_executor_size,
                executor=self._zlib_executor,
            )
            assert self._body is not None
            if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
                warnings.warn(
                    "Synchronous compression of large response bodies "
                    f"({len(self._body)} bytes) might block the async event loop. "
                    "Consider providing a custom value to zlib_executor_size/"
                    "zlib_executor response properties or disabling compression on it."
                )
            self._compressed_body = (
                await compressor.compress(self._body) + compressor.flush()
            )
            assert self._compressed_body is not None

            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
|
794 |
+
|
795 |
+
|
796 |
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Serialize *data* with *dumps* and wrap it in a JSON :class:`Response`.

    Exactly one of ``data``, ``text`` or ``body`` may be supplied.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
|
llmeval-env/lib/python3.10/site-packages/chardet/__init__.py
ADDED
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# This library is free software; you can redistribute it and/or
|
3 |
+
# modify it under the terms of the GNU Lesser General Public
|
4 |
+
# License as published by the Free Software Foundation; either
|
5 |
+
# version 2.1 of the License, or (at your option) any later version.
|
6 |
+
#
|
7 |
+
# This library is distributed in the hope that it will be useful,
|
8 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
9 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
10 |
+
# Lesser General Public License for more details.
|
11 |
+
#
|
12 |
+
# You should have received a copy of the GNU Lesser General Public
|
13 |
+
# License along with this library; if not, write to the Free Software
|
14 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
15 |
+
# 02110-1301 USA
|
16 |
+
######################### END LICENSE BLOCK #########################
|
17 |
+
|
18 |
+
from typing import List, Union
|
19 |
+
|
20 |
+
from .charsetgroupprober import CharSetGroupProber
|
21 |
+
from .charsetprober import CharSetProber
|
22 |
+
from .enums import InputState
|
23 |
+
from .resultdict import ResultDict
|
24 |
+
from .universaldetector import UniversalDetector
|
25 |
+
from .version import VERSION, __version__
|
26 |
+
|
27 |
+
__all__ = ["UniversalDetector", "detect", "detect_all", "__version__", "VERSION"]
|
28 |
+
|
29 |
+
|
30 |
+
def detect(
    byte_str: Union[bytes, bytearray], should_rename_legacy: bool = False
) -> ResultDict:
    """
    Detect the most likely encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    :param should_rename_legacy: Should we rename legacy encodings
                                 to their more modern equivalents?
    :type should_rename_legacy: ``bool``
    """
    # Normalize the input to a bytearray, rejecting everything else.
    if isinstance(byte_str, bytes):
        byte_str = bytearray(byte_str)
    elif not isinstance(byte_str, bytearray):
        raise TypeError(
            f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
        )
    detector = UniversalDetector(should_rename_legacy=should_rename_legacy)
    detector.feed(byte_str)
    return detector.close()
|
51 |
+
|
52 |
+
|
53 |
+
def detect_all(
    byte_str: Union[bytes, bytearray],
    ignore_threshold: bool = False,
    should_rename_legacy: bool = False,
) -> List[ResultDict]:
    """
    Detect all the possible encodings of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    :param ignore_threshold: Include encodings that are below
                             ``UniversalDetector.MINIMUM_THRESHOLD``
                             in results.
    :type ignore_threshold: ``bool``
    :param should_rename_legacy: Should we rename legacy encodings
                                 to their more modern equivalents?
    :type should_rename_legacy: ``bool``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError(
                f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
            )
        byte_str = bytearray(byte_str)

    detector = UniversalDetector(should_rename_legacy=should_rename_legacy)
    detector.feed(byte_str)
    detector.close()

    # Only multi-byte/high-byte input produces multiple prober candidates.
    if detector.input_state == InputState.HIGH_BYTE:
        results: List[ResultDict] = []
        probers: List[CharSetProber] = []
        # Flatten group probers so every concrete prober is inspected once.
        for prober in detector.charset_probers:
            if isinstance(prober, CharSetGroupProber):
                probers.extend(p for p in prober.probers)
            else:
                probers.append(prober)
        for prober in probers:
            if ignore_threshold or prober.get_confidence() > detector.MINIMUM_THRESHOLD:
                charset_name = prober.charset_name or ""
                lower_charset_name = charset_name.lower()
                # Use Windows encoding name instead of ISO-8859 if we saw any
                # extra Windows-specific bytes
                if lower_charset_name.startswith("iso-8859") and detector.has_win_bytes:
                    charset_name = detector.ISO_WIN_MAP.get(
                        lower_charset_name, charset_name
                    )
                # Rename legacy encodings with superset encodings if asked
                if should_rename_legacy:
                    charset_name = detector.LEGACY_MAP.get(
                        charset_name.lower(), charset_name
                    )
                results.append(
                    {
                        "encoding": charset_name,
                        "confidence": prober.get_confidence(),
                        "language": prober.language,
                    }
                )
        # Best candidate first.
        if len(results) > 0:
            return sorted(results, key=lambda result: -result["confidence"])

    # ASCII/escape input or no candidate above threshold: fall back to the
    # detector's single aggregate result.
    return [detector.result]
|
llmeval-env/lib/python3.10/site-packages/chardet/__main__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Wrapper so people can run python -m chardet"""
|
2 |
+
|
3 |
+
from .cli.chardetect import main
|
4 |
+
|
5 |
+
if __name__ == "__main__":
|
6 |
+
main()
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (3.16 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/big5prober.cpython-310.pyc
ADDED
Binary file (1.15 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/chardistribution.cpython-310.pyc
ADDED
Binary file (6.56 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/charsetgroupprober.cpython-310.pyc
ADDED
Binary file (2.41 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/charsetprober.cpython-310.pyc
ADDED
Binary file (3.82 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/codingstatemachine.cpython-310.pyc
ADDED
Binary file (3.06 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/codingstatemachinedict.cpython-310.pyc
ADDED
Binary file (684 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/cp949prober.cpython-310.pyc
ADDED
Binary file (1.15 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/enums.cpython-310.pyc
ADDED
Binary file (2.65 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/escprober.cpython-310.pyc
ADDED
Binary file (2.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/escsm.cpython-310.pyc
ADDED
Binary file (8.51 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/eucjpprober.cpython-310.pyc
ADDED
Binary file (2.59 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/euctwfreq.cpython-310.pyc
ADDED
Binary file (27.2 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/gb2312prober.cpython-310.pyc
ADDED
Binary file (1.16 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/hebrewprober.cpython-310.pyc
ADDED
Binary file (3.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/jisfreq.cpython-310.pyc
ADDED
Binary file (22.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/johabfreq.cpython-310.pyc
ADDED
Binary file (139 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/johabprober.cpython-310.pyc
ADDED
Binary file (1.15 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/jpcntx.cpython-310.pyc
ADDED
Binary file (38 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langhebrewmodel.cpython-310.pyc
ADDED
Binary file (44.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langhungarianmodel.cpython-310.pyc
ADDED
Binary file (47.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langrussianmodel.cpython-310.pyc
ADDED
Binary file (61 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/langturkishmodel.cpython-310.pyc
ADDED
Binary file (44.6 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/latin1prober.cpython-310.pyc
ADDED
Binary file (4.55 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/macromanprober.cpython-310.pyc
ADDED
Binary file (4.69 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcharsetprober.cpython-310.pyc
ADDED
Binary file (2.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc
ADDED
Binary file (1.25 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/mbcssm.cpython-310.pyc
ADDED
Binary file (20.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sbcharsetprober.cpython-310.pyc
ADDED
Binary file (3.67 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc
ADDED
Binary file (1.73 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/sjisprober.cpython-310.pyc
ADDED
Binary file (2.63 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/universaldetector.cpython-310.pyc
ADDED
Binary file (7.12 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/__pycache__/utf1632prober.cpython-310.pyc
ADDED
Binary file (6.09 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/chardistribution.py
ADDED
@@ -0,0 +1,261 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is Mozilla Communicator client code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from typing import Tuple, Union
|
29 |
+
|
30 |
+
from .big5freq import (
|
31 |
+
BIG5_CHAR_TO_FREQ_ORDER,
|
32 |
+
BIG5_TABLE_SIZE,
|
33 |
+
BIG5_TYPICAL_DISTRIBUTION_RATIO,
|
34 |
+
)
|
35 |
+
from .euckrfreq import (
|
36 |
+
EUCKR_CHAR_TO_FREQ_ORDER,
|
37 |
+
EUCKR_TABLE_SIZE,
|
38 |
+
EUCKR_TYPICAL_DISTRIBUTION_RATIO,
|
39 |
+
)
|
40 |
+
from .euctwfreq import (
|
41 |
+
EUCTW_CHAR_TO_FREQ_ORDER,
|
42 |
+
EUCTW_TABLE_SIZE,
|
43 |
+
EUCTW_TYPICAL_DISTRIBUTION_RATIO,
|
44 |
+
)
|
45 |
+
from .gb2312freq import (
|
46 |
+
GB2312_CHAR_TO_FREQ_ORDER,
|
47 |
+
GB2312_TABLE_SIZE,
|
48 |
+
GB2312_TYPICAL_DISTRIBUTION_RATIO,
|
49 |
+
)
|
50 |
+
from .jisfreq import (
|
51 |
+
JIS_CHAR_TO_FREQ_ORDER,
|
52 |
+
JIS_TABLE_SIZE,
|
53 |
+
JIS_TYPICAL_DISTRIBUTION_RATIO,
|
54 |
+
)
|
55 |
+
from .johabfreq import JOHAB_TO_EUCKR_ORDER_TABLE
|
56 |
+
|
57 |
+
|
58 |
+
class CharDistributionAnalysis:
    """Base class for two-byte character frequency-distribution analysis.

    Subclasses map raw byte pairs to a language-specific frequency order via
    :meth:`get_order`; this class counts how many observed characters fall
    into the "frequent" bucket and turns that ratio into a confidence score.
    """

    ENOUGH_DATA_THRESHOLD = 1024
    SURE_YES = 0.99
    SURE_NO = 0.01
    MINIMUM_DATA_THRESHOLD = 3

    def __init__(self) -> None:
        # Mapping table to get frequency order from char order (see
        # get_order()).
        self._char_to_freq_order: Tuple[int, ...] = tuple()
        self._table_size = 0  # Size of above table
        # Language-specific constant used when computing confidence; see
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for further detail.
        self.typical_distribution_ratio = 0.0
        self._done = False
        self._total_chars = 0
        self._freq_chars = 0
        self.reset()

    def reset(self) -> None:
        """reset analyser, clear any state"""
        # True once detection is done and a conclusion has been made.
        self._done = False
        self._total_chars = 0  # Total characters encountered
        # Characters whose frequency order is below 512.
        self._freq_chars = 0

    def feed(self, char: Union[bytes, bytearray], char_len: int) -> None:
        """feed a character with known length"""
        # Only 2-byte characters participate in the distribution analysis.
        order = self.get_order(char) if char_len == 2 else -1
        if order < 0:
            return
        self._total_chars += 1
        if order < self._table_size and self._char_to_freq_order[order] < 512:
            self._freq_chars += 1

    def get_confidence(self) -> float:
        """return confidence based on existing data"""
        # No characters in our consideration range -> negative answer.
        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
            return self.SURE_NO

        if self._total_chars != self._freq_chars:
            ratio = self._freq_chars / (
                (self._total_chars - self._freq_chars) * self.typical_distribution_ratio
            )
            if ratio < self.SURE_YES:
                return ratio

        # Normalize confidence: we never claim 100% certainty.
        return self.SURE_YES

    def got_enough_data(self) -> bool:
        # A partial sample suffices to draw a conclusion; no need to see
        # the whole input.
        return self._total_chars > self.ENOUGH_DATA_THRESHOLD

    def get_order(self, _: Union[bytes, bytearray]) -> int:
        # Subclasses convert an encoding-specific byte pair into a numeric
        # "order" so multiple encodings of a language can share one
        # frequency table. The base class recognizes nothing.
        return -1
|
130 |
+
|
131 |
+
|
132 |
+
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    """Frequency-distribution analysis for EUC-TW two-byte sequences."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
        self._table_size = EUCTW_TABLE_SIZE
        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # EUC-TW: lead byte 0xC4-0xFE, trail byte 0xA1-0xFE. The state
        # machine has already validated the pair, so no range checks on the
        # trail byte are needed here.
        lead = byte_str[0]
        if lead < 0xC4:
            return -1
        return 94 * (lead - 0xC4) + byte_str[1] - 0xA1
|
148 |
+
|
149 |
+
|
150 |
+
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    """Frequency-distribution analysis for EUC-KR two-byte sequences."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
        self._table_size = EUCKR_TABLE_SIZE
        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # EUC-KR: lead byte 0xB0-0xFE, trail byte 0xA1-0xFE; the state
        # machine has already validated the pair.
        lead = byte_str[0]
        if lead < 0xB0:
            return -1
        return 94 * (lead - 0xB0) + byte_str[1] - 0xA1
|
166 |
+
|
167 |
+
|
168 |
+
class JOHABDistributionAnalysis(CharDistributionAnalysis):
    """Frequency-distribution analysis for Johab, remapped onto EUC-KR."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
        self._table_size = EUCKR_TABLE_SIZE
        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        lead = byte_str[0]
        if not 0x88 <= lead < 0xD4:
            return -1
        # Remap the Johab code point onto the EUC-KR frequency order.
        code = (lead << 8) + byte_str[1]
        return JOHAB_TO_EUCKR_ORDER_TABLE.get(code, -1)
|
181 |
+
|
182 |
+
|
183 |
+
class GB2312DistributionAnalysis(CharDistributionAnalysis):
    """Frequency-distribution analysis for GB2312 two-byte sequences."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
        self._table_size = GB2312_TABLE_SIZE
        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # GB2312: lead byte 0xB0-0xFE, trail byte 0xA1-0xFE; the state
        # machine has already validated the pair.
        lead, trail = byte_str[0], byte_str[1]
        if lead < 0xB0 or trail < 0xA1:
            return -1
        return 94 * (lead - 0xB0) + trail - 0xA1
|
199 |
+
|
200 |
+
|
201 |
+
class Big5DistributionAnalysis(CharDistributionAnalysis):
    """Frequency-distribution analysis for Big5 two-byte sequences."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
        self._table_size = BIG5_TABLE_SIZE
        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # Big5: lead byte 0xA4-0xFE; trail byte 0x40-0x7E or 0xA1-0xFE.
        # The state machine has already validated the pair.
        lead, trail = byte_str[0], byte_str[1]
        if lead < 0xA4:
            return -1
        base = 157 * (lead - 0xA4)
        if trail >= 0xA1:
            return base + trail - 0xA1 + 63
        return base + trail - 0x40
|
219 |
+
|
220 |
+
|
221 |
+
class SJISDistributionAnalysis(CharDistributionAnalysis):
    """Character-distribution analysis for Shift-JIS encoded Japanese text."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # For Shift-JIS we look at two-byte sequences with
        #   first byte:  0x81 -- 0x9f, 0xe0 -- 0xfe
        #   second byte: 0x40 -- 0x7e, 0x81 -- 0xfe
        # No validation is needed here; the state machine has done that.
        lead, trail = byte_str[0], byte_str[1]
        if 0x81 <= lead <= 0x9F:
            row = lead - 0x81
        elif 0xE0 <= lead <= 0xEF:
            row = lead - 0xE0 + 31
        else:
            return -1
        # Trail bytes above 0x7f are not scored by this analysis.
        if trail > 0x7F:
            return -1
        # 188 trail cells per row.
        return 188 * row + trail - 0x40
|
244 |
+
|
245 |
+
|
246 |
+
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    """Character-distribution analysis for EUC-JP encoded Japanese text."""

    def __init__(self) -> None:
        super().__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
        # For EUC-JP we look at two-byte sequences with
        #   first byte:  0xa0 -- 0xfe
        #   second byte: 0xa1 -- 0xfe
        # No validation is needed here; the state machine has done that.
        lead = byte_str[0]
        if lead < 0xA0:
            return -1
        # 94 trail cells per row, rows starting at lead byte 0xa1.
        return 94 * (lead - 0xA1) + byte_str[1] - 0xA1
|
llmeval-env/lib/python3.10/site-packages/chardet/charsetprober.py
ADDED
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is Mozilla Universal charset detector code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 2001
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
# Shy Shalom - original C code
|
12 |
+
#
|
13 |
+
# This library is free software; you can redistribute it and/or
|
14 |
+
# modify it under the terms of the GNU Lesser General Public
|
15 |
+
# License as published by the Free Software Foundation; either
|
16 |
+
# version 2.1 of the License, or (at your option) any later version.
|
17 |
+
#
|
18 |
+
# This library is distributed in the hope that it will be useful,
|
19 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
20 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
21 |
+
# Lesser General Public License for more details.
|
22 |
+
#
|
23 |
+
# You should have received a copy of the GNU Lesser General Public
|
24 |
+
# License along with this library; if not, write to the Free Software
|
25 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
26 |
+
# 02110-1301 USA
|
27 |
+
######################### END LICENSE BLOCK #########################
|
28 |
+
|
29 |
+
import logging
|
30 |
+
import re
|
31 |
+
from typing import Optional, Union
|
32 |
+
|
33 |
+
from .enums import LanguageFilter, ProbingState
|
34 |
+
|
35 |
+
# Matches one "word": optional ASCII letters surrounding at least one
# high byte (0x80-0xff), optionally followed by a single trailing
# marker character (anything that is neither a letter nor a high byte).
INTERNATIONAL_WORDS_PATTERN = re.compile(
    b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?"
)
|
38 |
+
|
39 |
+
|
40 |
+
class CharSetProber:
    """Abstract base class for all charset probers.

    Subclasses implement :meth:`feed` and report a charset name, a
    language and a confidence.  The class also provides byte-filtering
    helpers shared by several concrete probers.
    """

    # Confidence above which a prober may short-circuit detection.
    SHORTCUT_THRESHOLD = 0.95

    def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None:
        self._state = ProbingState.DETECTING
        self.active = True
        self.lang_filter = lang_filter
        self.logger = logging.getLogger(__name__)

    def reset(self) -> None:
        """Return the prober to its initial DETECTING state."""
        self._state = ProbingState.DETECTING

    @property
    def charset_name(self) -> Optional[str]:
        """Name of the charset this prober detects (None in the base class)."""
        return None

    @property
    def language(self) -> Optional[str]:
        """Human language associated with the charset; subclass responsibility."""
        raise NotImplementedError

    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
        """Consume a chunk of input bytes; subclass responsibility."""
        raise NotImplementedError

    @property
    def state(self) -> ProbingState:
        """Current probing state."""
        return self._state

    def get_confidence(self) -> float:
        """Confidence that the input matches this charset (0.0 in the base)."""
        return 0.0

    @staticmethod
    def filter_high_byte_only(buf: Union[bytes, bytearray]) -> bytes:
        """Collapse every run of ASCII bytes (0x00-0x7f) into one space."""
        return re.sub(b"([\x00-\x7F])+", b" ", buf)

    @staticmethod
    def filter_international_words(buf: Union[bytes, bytearray]) -> bytearray:
        """
        We define three types of bytes:
        alphabet: english alphabets [a-zA-Z]
        international: international characters [\x80-\xFF]
        marker: everything else [^a-zA-Z\x80-\xFF]
        The input buffer can be thought to contain a series of words delimited
        by markers. This function works to filter all words that contain at
        least one international character. All contiguous sequences of markers
        are replaced by a single space ascii character.
        This filter applies to all scripts which do not use English characters.
        """
        result = bytearray()

        # The regex keeps only words containing at least one international
        # character; a word may carry one trailing marker character.
        for word in INTERNATIONAL_WORDS_PATTERN.findall(buf):
            result.extend(word[:-1])

            # A trailing marker is normalized to a space: markers are used
            # similarly across languages and must not skew the analysis.
            tail = word[-1:]
            if not tail.isalpha() and tail < b"\x80":
                tail = b" "
            result.extend(tail)

        return result

    @staticmethod
    def remove_xml_tags(buf: Union[bytes, bytearray]) -> bytes:
        """
        Returns a copy of ``buf`` that retains only the sequences of English
        alphabet and high byte characters that are not between <> characters.
        This filter can be applied to all scripts which contain both English
        characters and extended ASCII characters, but is currently only used by
        ``Latin1Prober``.
        """
        result = bytearray()
        inside_tag = False
        keep_from = 0
        view = memoryview(buf).cast("c")

        for pos, ch in enumerate(view):
            # Track transitions into and out of XML tags.

            # https://github.com/python/typeshed/issues/8182
            if ch == b">":  # type: ignore[comparison-overlap]
                # Tag closed: resume keeping bytes after it.
                keep_from = pos + 1
                inside_tag = False
            # https://github.com/python/typeshed/issues/8182
            elif ch == b"<":  # type: ignore[comparison-overlap]
                if pos > keep_from and not inside_tag:
                    # Emit the stretch kept so far, delimited by a space.
                    result.extend(view[keep_from:pos])
                    result.extend(b" ")
                inside_tag = True

        # Flush the trailing stretch if we did not end inside a tag.
        if not inside_tag:
            result.extend(view[keep_from:])

        return result
|
llmeval-env/lib/python3.10/site-packages/chardet/cli/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/chardet/cli/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (184 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/cli/__pycache__/chardetect.cpython-310.pyc
ADDED
Binary file (3.03 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/chardet/cli/chardetect.py
ADDED
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Script which takes one or more file paths and reports on their detected
|
3 |
+
encodings
|
4 |
+
|
5 |
+
Example::
|
6 |
+
|
7 |
+
% chardetect somefile someotherfile
|
8 |
+
somefile: windows-1252 with confidence 0.5
|
9 |
+
someotherfile: ascii with confidence 1.0
|
10 |
+
|
11 |
+
If no paths are provided, it takes its input from stdin.
|
12 |
+
|
13 |
+
"""
|
14 |
+
|
15 |
+
|
16 |
+
import argparse
|
17 |
+
import sys
|
18 |
+
from typing import Iterable, List, Optional
|
19 |
+
|
20 |
+
from .. import __version__
|
21 |
+
from ..universaldetector import UniversalDetector
|
22 |
+
|
23 |
+
|
24 |
+
def description_of(
    lines: Iterable[bytes],
    name: str = "stdin",
    minimal: bool = False,
    should_rename_legacy: bool = False,
) -> Optional[str]:
    """
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    :param minimal: If True, return only the encoding (possibly None).
    :type minimal: ``bool``
    :param should_rename_legacy: Should we rename legacy encodings to
                                 their more modern equivalents?
    :type should_rename_legacy: ``bool``
    """
    detector = UniversalDetector(should_rename_legacy=should_rename_legacy)
    for raw_line in lines:
        detector.feed(bytearray(raw_line))
        # Stop reading as soon as the detector is certain — particularly
        # useful right after a BOM.
        if detector.done:
            break
    detector.close()

    result = detector.result
    encoding = result["encoding"]
    if minimal:
        return encoding
    if encoding:
        return f'{name}: {encoding} with confidence {result["confidence"]}'
    return f"{name}: no result"
|
56 |
+
|
57 |
+
|
58 |
+
def main(argv: Optional[List[str]] = None) -> None:
    """
    Handles command line arguments and gets things started.

    :param argv: List of arguments, as if specified on the command-line.
                 If None, ``sys.argv[1:]`` is used instead.
    :type argv: list of str
    """
    # Build the command-line interface.
    parser = argparse.ArgumentParser(
        description=(
            "Takes one or more file paths and reports their detected encodings"
        )
    )
    parser.add_argument(
        "input",
        help="File whose encoding we would like to determine. (default: stdin)",
        type=argparse.FileType("rb"),
        nargs="*",
        default=[sys.stdin.buffer],
    )
    parser.add_argument(
        "--minimal",
        help="Print only the encoding to standard output",
        action="store_true",
    )
    parser.add_argument(
        "-l",
        "--legacy",
        help="Rename legacy encodings to more modern ones.",
        action="store_true",
    )
    parser.add_argument(
        "--version", action="version", version=f"%(prog)s {__version__}"
    )
    options = parser.parse_args(argv)

    for infile in options.input:
        if infile.isatty():
            # Warn interactive users how to terminate stdin input.
            print(
                "You are running chardetect interactively. Press "
                "CTRL-D twice at the start of a blank line to signal the "
                "end of your input. If you want help, run chardetect "
                "--help\n",
                file=sys.stderr,
            )
        print(
            description_of(
                infile,
                infile.name,
                minimal=options.minimal,
                should_rename_legacy=options.legacy,
            )
        )
|
109 |
+
|
110 |
+
|
111 |
+
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
llmeval-env/lib/python3.10/site-packages/chardet/eucjpprober.py
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is mozilla.org code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from typing import Union
|
29 |
+
|
30 |
+
from .chardistribution import EUCJPDistributionAnalysis
|
31 |
+
from .codingstatemachine import CodingStateMachine
|
32 |
+
from .enums import MachineState, ProbingState
|
33 |
+
from .jpcntx import EUCJPContextAnalysis
|
34 |
+
from .mbcharsetprober import MultiByteCharSetProber
|
35 |
+
from .mbcssm import EUCJP_SM_MODEL
|
36 |
+
|
37 |
+
|
38 |
+
class EUCJPProber(MultiByteCharSetProber):
    """Prober for the EUC-JP (Japanese) encoding.

    Combines a coding state machine (byte-sequence validity) with a
    character-distribution analyser and a context analyser; the reported
    confidence is the maximum of the two analysers' confidences.
    """

    def __init__(self) -> None:
        super().__init__()
        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
        self.distribution_analyzer = EUCJPDistributionAnalysis()
        self.context_analyzer = EUCJPContextAnalysis()
        self.reset()

    def reset(self) -> None:
        """Reset the prober (and its context analyser) to the initial state."""
        super().reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self) -> str:
        return "EUC-JP"

    @property
    def language(self) -> str:
        return "Japanese"

    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
        """Consume a chunk of input bytes and return the updated state.

        :param byte_str: next chunk of the document being probed
        :returns: the prober's state after consuming the chunk
        """
        assert self.coding_sm is not None
        assert self.distribution_analyzer is not None

        for i, byte in enumerate(byte_str):
            # byte_str is a bytes-like object, so ``byte`` is an int.
            coding_state = self.coding_sm.next_state(byte)
            if coding_state == MachineState.ERROR:
                # Invalid byte sequence for EUC-JP: rule this charset out.
                self.logger.debug(
                    "%s %s prober hit error at byte %s",
                    self.charset_name,
                    self.language,
                    i,
                )
                self._state = ProbingState.NOT_ME
                break
            if coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            if coding_state == MachineState.START:
                # A complete character was recognised; feed the (up to)
                # two-byte sequence to both analysers.
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # First byte of this chunk: pair it with the last byte
                    # carried over from the previous chunk.
                    self._last_char[1] = byte
                    self.context_analyzer.feed(self._last_char, char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.context_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len)

        if byte_str:
            # Remember the last byte so a character split across chunk
            # boundaries can be reassembled on the next call.  The guard
            # avoids an IndexError when an empty chunk is fed.
            self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            if self.context_analyzer.got_enough_data() and (
                self.get_confidence() > self.SHORTCUT_THRESHOLD
            ):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self) -> float:
        """Return the higher of the context and distribution confidences."""
        assert self.distribution_analyzer is not None

        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)
|
llmeval-env/lib/python3.10/site-packages/chardet/euckrfreq.py
ADDED
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is Mozilla Communicator client code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
# Sampling from about 20M text materials include literature and computer technology
|
29 |
+
|
30 |
+
# 128 --> 0.79
|
31 |
+
# 256 --> 0.92
|
32 |
+
# 512 --> 0.986
|
33 |
+
# 1024 --> 0.99944
|
34 |
+
# 2048 --> 0.99999
|
35 |
+
#
|
36 |
+
# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
|
37 |
+
# Random Distribution Ration = 512 / (2350-512) = 0.279.
|
38 |
+
#
|
39 |
+
# Typical Distribution Ratio
|
40 |
+
|
41 |
+
# Typical ratio of frequent-character hits to the rest for Korean text;
# derived from the corpus statistics described in the header comments.
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0

# Number of entries in EUCKR_CHAR_TO_FREQ_ORDER below.
EUCKR_TABLE_SIZE = 2352
44 |
+
|
45 |
+
# Char to FreqOrder table ,
|
46 |
+
# fmt: off
|
47 |
+
EUCKR_CHAR_TO_FREQ_ORDER = (
|
48 |
+
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
|
49 |
+
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
|
50 |
+
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
|
51 |
+
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
|
52 |
+
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
|
53 |
+
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
|
54 |
+
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
|
55 |
+
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
|
56 |
+
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
|
57 |
+
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
|
58 |
+
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
|
59 |
+
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
|
60 |
+
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
|
61 |
+
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
|
62 |
+
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
|
63 |
+
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
|
64 |
+
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
|
65 |
+
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
|
66 |
+
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
|
67 |
+
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
|
68 |
+
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
|
69 |
+
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
|
70 |
+
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
|
71 |
+
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
|
72 |
+
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
|
73 |
+
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
|
74 |
+
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
|
75 |
+
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
|
76 |
+
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
|
77 |
+
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
|
78 |
+
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
|
79 |
+
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
|
80 |
+
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
|
81 |
+
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
|
82 |
+
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
|
83 |
+
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
|
84 |
+
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
|
85 |
+
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
|
86 |
+
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
|
87 |
+
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
|
88 |
+
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
|
89 |
+
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
|
90 |
+
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
|
91 |
+
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
|
92 |
+
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
|
93 |
+
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
|
94 |
+
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
|
95 |
+
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
|
96 |
+
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
|
97 |
+
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
|
98 |
+
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
|
99 |
+
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
|
100 |
+
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
|
101 |
+
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
|
102 |
+
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
|
103 |
+
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
|
104 |
+
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
|
105 |
+
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
|
106 |
+
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
|
107 |
+
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
|
108 |
+
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
|
109 |
+
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
|
110 |
+
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
|
111 |
+
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
|
112 |
+
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
|
113 |
+
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
|
114 |
+
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
|
115 |
+
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
|
116 |
+
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
|
117 |
+
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
|
118 |
+
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
|
119 |
+
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
|
120 |
+
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
|
121 |
+
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
|
122 |
+
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
|
123 |
+
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
|
124 |
+
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
|
125 |
+
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
|
126 |
+
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
|
127 |
+
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
|
128 |
+
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
|
129 |
+
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
|
130 |
+
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
|
131 |
+
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
|
132 |
+
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
|
133 |
+
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
|
134 |
+
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
|
135 |
+
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
|
136 |
+
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
|
137 |
+
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
|
138 |
+
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
|
139 |
+
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
|
140 |
+
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
|
141 |
+
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
|
142 |
+
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
|
143 |
+
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
|
144 |
+
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
|
145 |
+
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
|
146 |
+
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
|
147 |
+
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
|
148 |
+
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
|
149 |
+
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
|
150 |
+
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
|
151 |
+
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
|
152 |
+
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
|
153 |
+
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
|
154 |
+
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
|
155 |
+
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
|
156 |
+
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
|
157 |
+
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
|
158 |
+
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
|
159 |
+
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
|
160 |
+
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
|
161 |
+
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
|
162 |
+
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
|
163 |
+
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
|
164 |
+
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
|
165 |
+
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
|
166 |
+
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
|
167 |
+
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
|
168 |
+
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
|
169 |
+
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
|
170 |
+
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
|
171 |
+
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
|
172 |
+
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
|
173 |
+
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
|
174 |
+
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
|
175 |
+
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
|
176 |
+
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
|
177 |
+
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
|
178 |
+
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
|
179 |
+
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
|
180 |
+
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
|
181 |
+
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
|
182 |
+
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
|
183 |
+
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
|
184 |
+
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
|
185 |
+
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
|
186 |
+
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
|
187 |
+
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
|
188 |
+
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
|
189 |
+
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
|
190 |
+
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
|
191 |
+
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
|
192 |
+
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
|
193 |
+
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
|
194 |
+
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
|
195 |
+
)
|
196 |
+
# fmt: on
|
llmeval-env/lib/python3.10/site-packages/chardet/gb2312prober.py
ADDED
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is mozilla.org code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from .chardistribution import GB2312DistributionAnalysis
|
29 |
+
from .codingstatemachine import CodingStateMachine
|
30 |
+
from .mbcharsetprober import MultiByteCharSetProber
|
31 |
+
from .mbcssm import GB2312_SM_MODEL
|
32 |
+
|
33 |
+
|
34 |
+
class GB2312Prober(MultiByteCharSetProber):
    """Multi-byte charset prober for the GB2312 Simplified-Chinese encoding.

    Wires the generic multi-byte detection machinery to the GB2312 coding
    state machine and its character-distribution model.
    """

    def __init__(self) -> None:
        super().__init__()
        # Validates byte sequences against the GB2312 coding model.
        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
        # Scores character-frequency plausibility for GB2312 text.
        self.distribution_analyzer = GB2312DistributionAnalysis()
        self.reset()

    @property
    def language(self) -> str:
        return "Chinese"

    @property
    def charset_name(self) -> str:
        return "GB2312"
|
llmeval-env/lib/python3.10/site-packages/chardet/jpcntx.py
ADDED
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is Mozilla Communicator client code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from typing import List, Tuple, Union
|
29 |
+
|
30 |
+
# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
|
31 |
+
# fmt: off
|
32 |
+
jp2_char_context = (
|
33 |
+
(0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
|
34 |
+
(2, 4, 0, 4, 0, 3, 0, 4, 0, 3, 4, 4, 4, 2, 4, 3, 3, 4, 3, 2, 3, 3, 4, 2, 3, 3, 3, 2, 4, 1, 4, 3, 3, 1, 5, 4, 3, 4, 3, 4, 3, 5, 3, 0, 3, 5, 4, 2, 0, 3, 1, 0, 3, 3, 0, 3, 3, 0, 1, 1, 0, 4, 3, 0, 3, 3, 0, 4, 0, 2, 0, 3, 5, 5, 5, 5, 4, 0, 4, 1, 0, 3, 4),
|
35 |
+
(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2),
|
36 |
+
(0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 4, 4, 3, 5, 3, 5, 1, 5, 3, 4, 3, 4, 4, 3, 4, 3, 3, 4, 3, 5, 4, 4, 3, 5, 5, 3, 5, 5, 5, 3, 5, 5, 3, 4, 5, 5, 3, 1, 3, 2, 0, 3, 4, 0, 4, 2, 0, 4, 2, 1, 5, 3, 2, 3, 5, 0, 4, 0, 2, 0, 5, 4, 4, 5, 4, 5, 0, 4, 0, 0, 4, 4),
|
37 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
38 |
+
(0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 5, 4, 3, 3, 3, 3, 4, 3, 5, 4, 4, 3, 5, 4, 4, 3, 4, 3, 4, 4, 4, 4, 5, 3, 4, 4, 3, 4, 5, 5, 4, 5, 5, 1, 4, 5, 4, 3, 0, 3, 3, 1, 3, 3, 0, 4, 4, 0, 3, 3, 1, 5, 3, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 0, 4, 1, 1, 3, 4),
|
39 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
40 |
+
(0, 4, 0, 3, 0, 3, 0, 4, 0, 3, 4, 4, 3, 2, 2, 1, 2, 1, 3, 1, 3, 3, 3, 3, 3, 4, 3, 1, 3, 3, 5, 3, 3, 0, 4, 3, 0, 5, 4, 3, 3, 5, 4, 4, 3, 4, 4, 5, 0, 1, 2, 0, 1, 2, 0, 2, 2, 0, 1, 0, 0, 5, 2, 2, 1, 4, 0, 3, 0, 1, 0, 4, 4, 3, 5, 4, 3, 0, 2, 1, 0, 4, 3),
|
41 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
42 |
+
(0, 3, 0, 5, 0, 4, 0, 2, 1, 4, 4, 2, 4, 1, 4, 2, 4, 2, 4, 3, 3, 3, 4, 3, 3, 3, 3, 1, 4, 2, 3, 3, 3, 1, 4, 4, 1, 1, 1, 4, 3, 3, 2, 0, 2, 4, 3, 2, 0, 3, 3, 0, 3, 1, 1, 0, 0, 0, 3, 3, 0, 4, 2, 2, 3, 4, 0, 4, 0, 3, 0, 4, 4, 5, 3, 4, 4, 0, 3, 0, 0, 1, 4),
|
43 |
+
(1, 4, 0, 4, 0, 4, 0, 4, 0, 3, 5, 4, 4, 3, 4, 3, 5, 4, 3, 3, 4, 3, 5, 4, 4, 4, 4, 3, 4, 2, 4, 3, 3, 1, 5, 4, 3, 2, 4, 5, 4, 5, 5, 4, 4, 5, 4, 4, 0, 3, 2, 2, 3, 3, 0, 4, 3, 1, 3, 2, 1, 4, 3, 3, 4, 5, 0, 3, 0, 2, 0, 4, 5, 5, 4, 5, 4, 0, 4, 0, 0, 5, 4),
|
44 |
+
(0, 5, 0, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 3, 4, 0, 4, 4, 4, 3, 4, 3, 4, 3, 3, 1, 4, 2, 4, 3, 4, 0, 5, 4, 1, 4, 5, 4, 4, 5, 3, 2, 4, 3, 4, 3, 2, 4, 1, 3, 3, 3, 2, 3, 2, 0, 4, 3, 3, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 4, 3, 0, 4, 1, 0, 1, 3),
|
45 |
+
(0, 3, 1, 4, 0, 3, 0, 2, 0, 3, 4, 4, 3, 1, 4, 2, 3, 3, 4, 3, 4, 3, 4, 3, 4, 4, 3, 2, 3, 1, 5, 4, 4, 1, 4, 4, 3, 5, 4, 4, 3, 5, 5, 4, 3, 4, 4, 3, 1, 2, 3, 1, 2, 2, 0, 3, 2, 0, 3, 1, 0, 5, 3, 3, 3, 4, 3, 3, 3, 3, 4, 4, 4, 4, 5, 4, 2, 0, 3, 3, 2, 4, 3),
|
46 |
+
(0, 2, 0, 3, 0, 1, 0, 1, 0, 0, 3, 2, 0, 0, 2, 0, 1, 0, 2, 1, 3, 3, 3, 1, 2, 3, 1, 0, 1, 0, 4, 2, 1, 1, 3, 3, 0, 4, 3, 3, 1, 4, 3, 3, 0, 3, 3, 2, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 4, 1, 0, 2, 3, 2, 2, 2, 1, 3, 3, 3, 4, 4, 3, 2, 0, 3, 1, 0, 3, 3),
|
47 |
+
(0, 4, 0, 4, 0, 3, 0, 3, 0, 4, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 4, 2, 4, 3, 4, 3, 3, 2, 4, 3, 4, 5, 4, 1, 4, 5, 3, 5, 4, 5, 3, 5, 4, 0, 3, 5, 5, 3, 1, 3, 3, 2, 2, 3, 0, 3, 4, 1, 3, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 5, 3, 0, 4, 1, 0, 3, 4),
|
48 |
+
(0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 2, 2, 1, 0, 1, 0, 0, 0, 3, 0, 3, 0, 3, 0, 1, 3, 1, 0, 3, 1, 3, 3, 3, 1, 3, 3, 3, 0, 1, 3, 1, 3, 4, 0, 0, 3, 1, 1, 0, 3, 2, 0, 0, 0, 0, 1, 3, 0, 1, 0, 0, 3, 3, 2, 0, 3, 0, 0, 0, 0, 0, 3, 4, 3, 4, 3, 3, 0, 3, 0, 0, 2, 3),
|
49 |
+
(2, 3, 0, 3, 0, 2, 0, 1, 0, 3, 3, 4, 3, 1, 3, 1, 1, 1, 3, 1, 4, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 4, 3, 1, 4, 3, 2, 5, 5, 4, 4, 4, 4, 3, 3, 4, 4, 4, 0, 2, 1, 1, 3, 2, 0, 1, 2, 0, 0, 1, 0, 4, 1, 3, 3, 3, 0, 3, 0, 1, 0, 4, 4, 4, 5, 5, 3, 0, 2, 0, 0, 4, 4),
|
50 |
+
(0, 2, 0, 1, 0, 3, 1, 3, 0, 2, 3, 3, 3, 0, 3, 1, 0, 0, 3, 0, 3, 2, 3, 1, 3, 2, 1, 1, 0, 0, 4, 2, 1, 0, 2, 3, 1, 4, 3, 2, 0, 4, 4, 3, 1, 3, 1, 3, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 1, 1, 1, 2, 0, 3, 0, 0, 0, 3, 4, 2, 4, 3, 2, 0, 1, 0, 0, 3, 3),
|
51 |
+
(0, 1, 0, 4, 0, 5, 0, 4, 0, 2, 4, 4, 2, 3, 3, 2, 3, 3, 5, 3, 3, 3, 4, 3, 4, 2, 3, 0, 4, 3, 3, 3, 4, 1, 4, 3, 2, 1, 5, 5, 3, 4, 5, 1, 3, 5, 4, 2, 0, 3, 3, 0, 1, 3, 0, 4, 2, 0, 1, 3, 1, 4, 3, 3, 3, 3, 0, 3, 0, 1, 0, 3, 4, 4, 4, 5, 5, 0, 3, 0, 1, 4, 5),
|
52 |
+
(0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 3, 1, 3, 0, 4, 0, 1, 1, 3, 0, 3, 4, 3, 2, 3, 1, 0, 3, 3, 2, 3, 1, 3, 0, 2, 3, 0, 2, 1, 4, 1, 2, 2, 0, 0, 3, 3, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 3, 2, 1, 3, 3, 0, 2, 0, 2, 0, 0, 3, 3, 1, 2, 4, 0, 3, 0, 2, 2, 3),
|
53 |
+
(2, 4, 0, 5, 0, 4, 0, 4, 0, 2, 4, 4, 4, 3, 4, 3, 3, 3, 1, 2, 4, 3, 4, 3, 4, 4, 5, 0, 3, 3, 3, 3, 2, 0, 4, 3, 1, 4, 3, 4, 1, 4, 4, 3, 3, 4, 4, 3, 1, 2, 3, 0, 4, 2, 0, 4, 1, 0, 3, 3, 0, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 3, 5, 3, 4, 5, 2, 0, 3, 0, 0, 4, 5),
|
54 |
+
(0, 3, 0, 4, 0, 1, 0, 1, 0, 1, 3, 2, 2, 1, 3, 0, 3, 0, 2, 0, 2, 0, 3, 0, 2, 0, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1, 0, 0, 0, 4, 0, 3, 1, 0, 2, 1, 3, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 2, 2, 3, 1, 0, 3, 0, 0, 0, 1, 4, 4, 4, 3, 0, 0, 4, 0, 0, 1, 4),
|
55 |
+
(1, 4, 1, 5, 0, 3, 0, 3, 0, 4, 5, 4, 4, 3, 5, 3, 3, 4, 4, 3, 4, 1, 3, 3, 3, 3, 2, 1, 4, 1, 5, 4, 3, 1, 4, 4, 3, 5, 4, 4, 3, 5, 4, 3, 3, 4, 4, 4, 0, 3, 3, 1, 2, 3, 0, 3, 1, 0, 3, 3, 0, 5, 4, 4, 4, 4, 4, 4, 3, 3, 5, 4, 4, 3, 3, 5, 4, 0, 3, 2, 0, 4, 4),
|
56 |
+
(0, 2, 0, 3, 0, 1, 0, 0, 0, 1, 3, 3, 3, 2, 4, 1, 3, 0, 3, 1, 3, 0, 2, 2, 1, 1, 0, 0, 2, 0, 4, 3, 1, 0, 4, 3, 0, 4, 4, 4, 1, 4, 3, 1, 1, 3, 3, 1, 0, 2, 0, 0, 1, 3, 0, 0, 0, 0, 2, 0, 0, 4, 3, 2, 4, 3, 5, 4, 3, 3, 3, 4, 3, 3, 4, 3, 3, 0, 2, 1, 0, 3, 3),
|
57 |
+
(0, 2, 0, 4, 0, 3, 0, 2, 0, 2, 5, 5, 3, 4, 4, 4, 4, 1, 4, 3, 3, 0, 4, 3, 4, 3, 1, 3, 3, 2, 4, 3, 0, 3, 4, 3, 0, 3, 4, 4, 2, 4, 4, 0, 4, 5, 3, 3, 2, 2, 1, 1, 1, 2, 0, 1, 5, 0, 3, 3, 2, 4, 3, 3, 3, 4, 0, 3, 0, 2, 0, 4, 4, 3, 5, 5, 0, 0, 3, 0, 2, 3, 3),
|
58 |
+
(0, 3, 0, 4, 0, 3, 0, 1, 0, 3, 4, 3, 3, 1, 3, 3, 3, 0, 3, 1, 3, 0, 4, 3, 3, 1, 1, 0, 3, 0, 3, 3, 0, 0, 4, 4, 0, 1, 5, 4, 3, 3, 5, 0, 3, 3, 4, 3, 0, 2, 0, 1, 1, 1, 0, 1, 3, 0, 1, 2, 1, 3, 3, 2, 3, 3, 0, 3, 0, 1, 0, 1, 3, 3, 4, 4, 1, 0, 1, 2, 2, 1, 3),
|
59 |
+
(0, 1, 0, 4, 0, 4, 0, 3, 0, 1, 3, 3, 3, 2, 3, 1, 1, 0, 3, 0, 3, 3, 4, 3, 2, 4, 2, 0, 1, 0, 4, 3, 2, 0, 4, 3, 0, 5, 3, 3, 2, 4, 4, 4, 3, 3, 3, 4, 0, 1, 3, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 4, 2, 3, 3, 3, 0, 3, 0, 0, 0, 4, 4, 4, 5, 3, 2, 0, 3, 3, 0, 3, 5),
|
60 |
+
(0, 2, 0, 3, 0, 0, 0, 3, 0, 1, 3, 0, 2, 0, 0, 0, 1, 0, 3, 1, 1, 3, 3, 0, 0, 3, 0, 0, 3, 0, 2, 3, 1, 0, 3, 1, 0, 3, 3, 2, 0, 4, 2, 2, 0, 2, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 0, 0, 0, 1, 3, 1, 2, 0, 0, 0, 1, 0, 0, 1, 4),
|
61 |
+
(0, 3, 0, 3, 0, 5, 0, 1, 0, 2, 4, 3, 1, 3, 3, 2, 1, 1, 5, 2, 1, 0, 5, 1, 2, 0, 0, 0, 3, 3, 2, 2, 3, 2, 4, 3, 0, 0, 3, 3, 1, 3, 3, 0, 2, 5, 3, 4, 0, 3, 3, 0, 1, 2, 0, 2, 2, 0, 3, 2, 0, 2, 2, 3, 3, 3, 0, 2, 0, 1, 0, 3, 4, 4, 2, 5, 4, 0, 3, 0, 0, 3, 5),
|
62 |
+
(0, 3, 0, 3, 0, 3, 0, 1, 0, 3, 3, 3, 3, 0, 3, 0, 2, 0, 2, 1, 1, 0, 2, 0, 1, 0, 0, 0, 2, 1, 0, 0, 1, 0, 3, 2, 0, 0, 3, 3, 1, 2, 3, 1, 0, 3, 3, 0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 3, 1, 2, 3, 0, 3, 0, 1, 0, 3, 2, 1, 0, 4, 3, 0, 1, 1, 0, 3, 3),
|
63 |
+
(0, 4, 0, 5, 0, 3, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 4, 3, 5, 3, 3, 2, 5, 3, 4, 4, 4, 3, 4, 3, 4, 5, 5, 3, 4, 4, 3, 4, 4, 5, 4, 4, 4, 3, 4, 5, 5, 4, 2, 3, 4, 2, 3, 4, 0, 3, 3, 1, 4, 3, 2, 4, 3, 3, 5, 5, 0, 3, 0, 3, 0, 5, 5, 5, 5, 4, 4, 0, 4, 0, 1, 4, 4),
|
64 |
+
(0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 5, 4, 4, 2, 3, 2, 5, 1, 3, 2, 5, 1, 4, 2, 3, 2, 3, 3, 4, 3, 3, 3, 3, 2, 5, 4, 1, 3, 3, 5, 3, 4, 4, 0, 4, 4, 3, 1, 1, 3, 1, 0, 2, 3, 0, 2, 3, 0, 3, 0, 0, 4, 3, 1, 3, 4, 0, 3, 0, 2, 0, 4, 4, 4, 3, 4, 5, 0, 4, 0, 0, 3, 4),
|
65 |
+
(0, 3, 0, 3, 0, 3, 1, 2, 0, 3, 4, 4, 3, 3, 3, 0, 2, 2, 4, 3, 3, 1, 3, 3, 3, 1, 1, 0, 3, 1, 4, 3, 2, 3, 4, 4, 2, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 3, 1, 3, 3, 1, 3, 3, 0, 4, 1, 0, 2, 2, 1, 4, 3, 2, 3, 3, 5, 4, 3, 3, 5, 4, 4, 3, 3, 0, 4, 0, 3, 2, 2, 4, 4),
|
66 |
+
(0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 3, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 0, 0, 1, 0, 0, 0, 0, 3, 0, 0, 1, 0, 1, 1, 3, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 0, 3, 4, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1),
|
67 |
+
(0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 4, 1, 4, 0, 3, 0, 4, 0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 1, 5, 1, 4, 0, 0, 3, 0, 5, 0, 5, 2, 0, 1, 0, 0, 0, 2, 1, 4, 0, 1, 3, 0, 0, 3, 0, 0, 3, 1, 1, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0),
|
68 |
+
(1, 4, 0, 5, 0, 3, 0, 2, 0, 3, 5, 4, 4, 3, 4, 3, 5, 3, 4, 3, 3, 0, 4, 3, 3, 3, 3, 3, 3, 2, 4, 4, 3, 1, 3, 4, 4, 5, 4, 4, 3, 4, 4, 1, 3, 5, 4, 3, 3, 3, 1, 2, 2, 3, 3, 1, 3, 1, 3, 3, 3, 5, 3, 3, 4, 5, 0, 3, 0, 3, 0, 3, 4, 3, 4, 4, 3, 0, 3, 0, 2, 4, 3),
|
69 |
+
(0, 1, 0, 4, 0, 0, 0, 0, 0, 1, 4, 0, 4, 1, 4, 2, 4, 0, 3, 0, 1, 0, 1, 0, 0, 0, 0, 0, 2, 0, 3, 1, 1, 1, 0, 3, 0, 0, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 3, 0, 0, 0, 0, 3, 2, 0, 2, 2, 0, 1, 0, 0, 0, 2, 3, 2, 3, 3, 0, 0, 0, 0, 2, 1, 0),
|
70 |
+
(0, 5, 1, 5, 0, 3, 0, 3, 0, 5, 4, 4, 5, 1, 5, 3, 3, 0, 4, 3, 4, 3, 5, 3, 4, 3, 3, 2, 4, 3, 4, 3, 3, 0, 3, 3, 1, 4, 4, 3, 4, 4, 4, 3, 4, 5, 5, 3, 2, 3, 1, 1, 3, 3, 1, 3, 1, 1, 3, 3, 2, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 5, 3, 3, 0, 3, 4, 0, 4, 3),
|
71 |
+
(0, 5, 0, 5, 0, 3, 0, 2, 0, 4, 4, 3, 5, 2, 4, 3, 3, 3, 4, 4, 4, 3, 5, 3, 5, 3, 3, 1, 4, 0, 4, 3, 3, 0, 3, 3, 0, 4, 4, 4, 4, 5, 4, 3, 3, 5, 5, 3, 2, 3, 1, 2, 3, 2, 0, 1, 0, 0, 3, 2, 2, 4, 4, 3, 1, 5, 0, 4, 0, 3, 0, 4, 3, 1, 3, 2, 1, 0, 3, 3, 0, 3, 3),
|
72 |
+
(0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 5, 5, 3, 4, 3, 3, 2, 5, 4, 4, 3, 5, 3, 5, 3, 4, 0, 4, 3, 4, 4, 3, 2, 4, 4, 3, 4, 5, 4, 4, 5, 5, 0, 3, 5, 5, 4, 1, 3, 3, 2, 3, 3, 1, 3, 1, 0, 4, 3, 1, 4, 4, 3, 4, 5, 0, 4, 0, 2, 0, 4, 3, 4, 4, 3, 3, 0, 4, 0, 0, 5, 5),
|
73 |
+
(0, 4, 0, 4, 0, 5, 0, 1, 1, 3, 3, 4, 4, 3, 4, 1, 3, 0, 5, 1, 3, 0, 3, 1, 3, 1, 1, 0, 3, 0, 3, 3, 4, 0, 4, 3, 0, 4, 4, 4, 3, 4, 4, 0, 3, 5, 4, 1, 0, 3, 0, 0, 2, 3, 0, 3, 1, 0, 3, 1, 0, 3, 2, 1, 3, 5, 0, 3, 0, 1, 0, 3, 2, 3, 3, 4, 4, 0, 2, 2, 0, 4, 4),
|
74 |
+
(2, 4, 0, 5, 0, 4, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 5, 3, 5, 3, 5, 2, 5, 3, 4, 3, 3, 4, 3, 4, 5, 3, 2, 1, 5, 4, 3, 2, 3, 4, 5, 3, 4, 1, 2, 5, 4, 3, 0, 3, 3, 0, 3, 2, 0, 2, 3, 0, 4, 1, 0, 3, 4, 3, 3, 5, 0, 3, 0, 1, 0, 4, 5, 5, 5, 4, 3, 0, 4, 2, 0, 3, 5),
|
75 |
+
(0, 5, 0, 4, 0, 4, 0, 2, 0, 5, 4, 3, 4, 3, 4, 3, 3, 3, 4, 3, 4, 2, 5, 3, 5, 3, 4, 1, 4, 3, 4, 4, 4, 0, 3, 5, 0, 4, 4, 4, 4, 5, 3, 1, 3, 4, 5, 3, 3, 3, 3, 3, 3, 3, 0, 2, 2, 0, 3, 3, 2, 4, 3, 3, 3, 5, 3, 4, 1, 3, 3, 5, 3, 2, 0, 0, 0, 0, 4, 3, 1, 3, 3),
|
76 |
+
(0, 1, 0, 3, 0, 3, 0, 1, 0, 1, 3, 3, 3, 2, 3, 3, 3, 0, 3, 0, 0, 0, 3, 1, 3, 0, 0, 0, 2, 2, 2, 3, 0, 0, 3, 2, 0, 1, 2, 4, 1, 3, 3, 0, 0, 3, 3, 3, 0, 1, 0, 0, 2, 1, 0, 0, 3, 0, 3, 1, 0, 3, 0, 0, 1, 3, 0, 2, 0, 1, 0, 3, 3, 1, 3, 3, 0, 0, 1, 1, 0, 3, 3),
|
77 |
+
(0, 2, 0, 3, 0, 2, 1, 4, 0, 2, 2, 3, 1, 1, 3, 1, 1, 0, 2, 0, 3, 1, 2, 3, 1, 3, 0, 0, 1, 0, 4, 3, 2, 3, 3, 3, 1, 4, 2, 3, 3, 3, 3, 1, 0, 3, 1, 4, 0, 1, 1, 0, 1, 2, 0, 1, 1, 0, 1, 1, 0, 3, 1, 3, 2, 2, 0, 1, 0, 0, 0, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 2, 3),
|
78 |
+
(0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 5, 5, 3, 3, 4, 3, 3, 1, 5, 4, 4, 2, 4, 4, 4, 3, 4, 2, 4, 3, 5, 5, 4, 3, 3, 4, 3, 3, 5, 5, 4, 5, 5, 1, 3, 4, 5, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 1, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 5, 3, 3, 1, 4, 3, 0, 4, 0, 1, 5, 3),
|
79 |
+
(0, 5, 0, 5, 0, 4, 0, 2, 0, 4, 4, 3, 4, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4, 5, 3, 3, 5, 2, 4, 4, 4, 3, 4, 4, 3, 3, 4, 4, 5, 5, 3, 3, 4, 3, 4, 3, 3, 4, 3, 3, 3, 3, 1, 2, 2, 1, 4, 3, 3, 5, 4, 4, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 4, 4, 1, 0, 4, 2, 0, 2, 4),
|
80 |
+
(0, 4, 0, 4, 0, 3, 0, 1, 0, 3, 5, 2, 3, 0, 3, 0, 2, 1, 4, 2, 3, 3, 4, 1, 4, 3, 3, 2, 4, 1, 3, 3, 3, 0, 3, 3, 0, 0, 3, 3, 3, 5, 3, 3, 3, 3, 3, 2, 0, 2, 0, 0, 2, 0, 0, 2, 0, 0, 1, 0, 0, 3, 1, 2, 2, 3, 0, 3, 0, 2, 0, 4, 4, 3, 3, 4, 1, 0, 3, 0, 0, 2, 4),
|
81 |
+
(0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 3, 1, 3, 0, 3, 2, 0, 0, 0, 1, 0, 3, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 2, 0, 0, 0, 0, 0, 0, 2),
|
82 |
+
(0, 2, 1, 3, 0, 2, 0, 2, 0, 3, 3, 3, 3, 1, 3, 1, 3, 3, 3, 3, 3, 3, 4, 2, 2, 1, 2, 1, 4, 0, 4, 3, 1, 3, 3, 3, 2, 4, 3, 5, 4, 3, 3, 3, 3, 3, 3, 3, 0, 1, 3, 0, 2, 0, 0, 1, 0, 0, 1, 0, 0, 4, 2, 0, 2, 3, 0, 3, 3, 0, 3, 3, 4, 2, 3, 1, 4, 0, 1, 2, 0, 2, 3),
|
83 |
+
(0, 3, 0, 3, 0, 1, 0, 3, 0, 2, 3, 3, 3, 0, 3, 1, 2, 0, 3, 3, 2, 3, 3, 2, 3, 2, 3, 1, 3, 0, 4, 3, 2, 0, 3, 3, 1, 4, 3, 3, 2, 3, 4, 3, 1, 3, 3, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 4, 1, 1, 0, 3, 0, 3, 1, 0, 2, 3, 3, 3, 3, 3, 1, 0, 0, 2, 0, 3, 3),
|
84 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3),
|
85 |
+
(0, 2, 0, 3, 1, 3, 0, 3, 0, 2, 3, 3, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 1, 3, 0, 2, 3, 1, 1, 4, 3, 3, 2, 3, 3, 1, 2, 2, 4, 1, 3, 3, 0, 1, 4, 2, 3, 0, 1, 3, 0, 3, 0, 0, 1, 3, 0, 2, 0, 0, 3, 3, 2, 1, 3, 0, 3, 0, 2, 0, 3, 4, 4, 4, 3, 1, 0, 3, 0, 0, 3, 3),
|
86 |
+
(0, 2, 0, 1, 0, 2, 0, 0, 0, 1, 3, 2, 2, 1, 3, 0, 1, 1, 3, 0, 3, 2, 3, 1, 2, 0, 2, 0, 1, 1, 3, 3, 3, 0, 3, 3, 1, 1, 2, 3, 2, 3, 3, 1, 2, 3, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 2, 1, 2, 1, 3, 0, 3, 0, 0, 0, 3, 4, 4, 4, 3, 2, 0, 2, 0, 0, 2, 4),
|
87 |
+
(0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 3),
|
88 |
+
(0, 3, 0, 3, 0, 2, 0, 3, 0, 3, 3, 3, 2, 3, 2, 2, 2, 0, 3, 1, 3, 3, 3, 2, 3, 3, 0, 0, 3, 0, 3, 2, 2, 0, 2, 3, 1, 4, 3, 4, 3, 3, 2, 3, 1, 5, 4, 4, 0, 3, 1, 2, 1, 3, 0, 3, 1, 1, 2, 0, 2, 3, 1, 3, 1, 3, 0, 3, 0, 1, 0, 3, 3, 4, 4, 2, 1, 0, 2, 1, 0, 2, 4),
|
89 |
+
(0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 4, 2, 5, 1, 4, 0, 2, 0, 2, 1, 3, 1, 4, 0, 2, 1, 0, 0, 2, 1, 4, 1, 1, 0, 3, 3, 0, 5, 1, 3, 2, 3, 3, 1, 0, 3, 2, 3, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 1, 0, 3, 0, 2, 0, 1, 0, 3, 3, 3, 4, 3, 3, 0, 0, 0, 0, 2, 3),
|
90 |
+
(0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 3),
|
91 |
+
(0, 1, 0, 3, 0, 4, 0, 3, 0, 2, 4, 3, 1, 0, 3, 2, 2, 1, 3, 1, 2, 2, 3, 1, 1, 1, 2, 1, 3, 0, 1, 2, 0, 1, 3, 2, 1, 3, 0, 5, 5, 1, 0, 0, 1, 3, 2, 1, 0, 3, 0, 0, 1, 0, 0, 0, 0, 0, 3, 4, 0, 1, 1, 1, 3, 2, 0, 2, 0, 1, 0, 2, 3, 3, 1, 2, 3, 0, 1, 0, 1, 0, 4),
|
92 |
+
(0, 0, 0, 1, 0, 3, 0, 3, 0, 2, 2, 1, 0, 0, 4, 0, 3, 0, 3, 1, 3, 0, 3, 0, 3, 0, 1, 0, 3, 0, 3, 1, 3, 0, 3, 3, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 2, 0, 0, 2, 0, 0, 0, 0, 2, 3, 3, 3, 3, 0, 0, 0, 0, 1, 4),
|
93 |
+
(0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 3, 1, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 0, 2, 0, 2, 3, 0, 0, 2, 2, 3, 1, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 2, 0, 0, 0, 0, 2, 3),
|
94 |
+
(2, 4, 0, 5, 0, 5, 0, 4, 0, 3, 4, 3, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 4, 5, 5, 5, 2, 3, 0, 5, 5, 4, 1, 5, 4, 3, 1, 5, 4, 3, 4, 4, 3, 3, 4, 3, 3, 0, 3, 2, 0, 2, 3, 0, 3, 0, 0, 3, 3, 0, 5, 3, 2, 3, 3, 0, 3, 0, 3, 0, 3, 4, 5, 4, 5, 3, 0, 4, 3, 0, 3, 4),
|
95 |
+
(0, 3, 0, 3, 0, 3, 0, 3, 0, 3, 3, 4, 3, 2, 3, 2, 3, 0, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 2, 4, 3, 3, 1, 3, 4, 3, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 3, 1, 0, 5, 3, 2, 1, 3, 0, 3, 0, 1, 2, 4, 3, 2, 4, 3, 3, 0, 3, 2, 0, 4, 4),
|
96 |
+
(0, 3, 0, 3, 0, 1, 0, 0, 0, 1, 4, 3, 3, 2, 3, 1, 3, 1, 4, 2, 3, 2, 4, 2, 3, 4, 3, 0, 2, 2, 3, 3, 3, 0, 3, 3, 3, 0, 3, 4, 1, 3, 3, 0, 3, 4, 3, 3, 0, 1, 1, 0, 1, 0, 0, 0, 4, 0, 3, 0, 0, 3, 1, 2, 1, 3, 0, 4, 0, 1, 0, 4, 3, 3, 4, 3, 3, 0, 2, 0, 0, 3, 3),
|
97 |
+
(0, 3, 0, 4, 0, 1, 0, 3, 0, 3, 4, 3, 3, 0, 3, 3, 3, 1, 3, 1, 3, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 3, 3, 1, 3, 3, 2, 5, 4, 3, 3, 4, 5, 3, 2, 5, 3, 4, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 1, 1, 0, 4, 2, 2, 1, 3, 0, 3, 0, 2, 0, 4, 4, 3, 5, 3, 2, 0, 1, 1, 0, 3, 4),
|
98 |
+
(0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 4, 3, 3, 2, 3, 3, 3, 1, 4, 3, 4, 1, 5, 3, 4, 3, 4, 0, 4, 2, 4, 3, 4, 1, 5, 4, 0, 4, 4, 4, 4, 5, 4, 1, 3, 5, 4, 2, 1, 4, 1, 1, 3, 2, 0, 3, 1, 0, 3, 2, 1, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 3, 3, 3, 0, 4, 2, 0, 3, 4),
|
99 |
+
(1, 4, 0, 4, 0, 3, 0, 1, 0, 3, 3, 3, 1, 1, 3, 3, 2, 2, 3, 3, 1, 0, 3, 2, 2, 1, 2, 0, 3, 1, 2, 1, 2, 0, 3, 2, 0, 2, 2, 3, 3, 4, 3, 0, 3, 3, 1, 2, 0, 1, 1, 3, 1, 2, 0, 0, 3, 0, 1, 1, 0, 3, 2, 2, 3, 3, 0, 3, 0, 0, 0, 2, 3, 3, 4, 3, 3, 0, 1, 0, 0, 1, 4),
|
100 |
+
(0, 4, 0, 4, 0, 4, 0, 0, 0, 3, 4, 4, 3, 1, 4, 2, 3, 2, 3, 3, 3, 1, 4, 3, 4, 0, 3, 0, 4, 2, 3, 3, 2, 2, 5, 4, 2, 1, 3, 4, 3, 4, 3, 1, 3, 3, 4, 2, 0, 2, 1, 0, 3, 3, 0, 0, 2, 0, 3, 1, 0, 4, 4, 3, 4, 3, 0, 4, 0, 1, 0, 2, 4, 4, 4, 4, 4, 0, 3, 2, 0, 3, 3),
|
101 |
+
(0, 0, 0, 1, 0, 4, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 2, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2),
|
102 |
+
(0, 2, 0, 3, 0, 4, 0, 4, 0, 1, 3, 3, 3, 0, 4, 0, 2, 1, 2, 1, 1, 1, 2, 0, 3, 1, 1, 0, 1, 0, 3, 1, 0, 0, 3, 3, 2, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 2, 2, 0, 3, 1, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 3, 0, 0, 0, 0, 1, 0, 0, 3, 3, 4, 3, 1, 0, 1, 0, 3, 0, 2),
|
103 |
+
(0, 0, 0, 3, 0, 5, 0, 0, 0, 0, 1, 0, 2, 0, 3, 1, 0, 1, 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 4, 0, 0, 0, 2, 3, 0, 1, 4, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0, 0, 0, 0, 0, 3),
|
104 |
+
(0, 2, 0, 5, 0, 5, 0, 1, 0, 2, 4, 3, 3, 2, 5, 1, 3, 2, 3, 3, 3, 0, 4, 1, 2, 0, 3, 0, 4, 0, 2, 2, 1, 1, 5, 3, 0, 0, 1, 4, 2, 3, 2, 0, 3, 3, 3, 2, 0, 2, 4, 1, 1, 2, 0, 1, 1, 0, 3, 1, 0, 1, 3, 1, 2, 3, 0, 2, 0, 0, 0, 1, 3, 5, 4, 4, 4, 0, 3, 0, 0, 1, 3),
|
105 |
+
(0, 4, 0, 5, 0, 4, 0, 4, 0, 4, 5, 4, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 3, 4, 5, 4, 2, 4, 2, 3, 4, 3, 1, 4, 4, 1, 3, 5, 4, 4, 5, 5, 4, 4, 5, 5, 5, 2, 3, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 4, 4, 4, 0, 3, 0, 4, 0, 3, 3, 4, 4, 5, 0, 0, 4, 3, 0, 4, 5),
|
106 |
+
(0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 4, 4, 4, 3, 3, 2, 4, 3, 4, 3, 4, 3, 5, 3, 4, 3, 2, 1, 4, 2, 4, 4, 3, 1, 3, 4, 2, 4, 5, 5, 3, 4, 5, 4, 1, 5, 4, 3, 0, 3, 2, 2, 3, 2, 1, 3, 1, 0, 3, 3, 3, 5, 3, 3, 3, 5, 4, 4, 2, 3, 3, 4, 3, 3, 3, 2, 1, 0, 3, 2, 1, 4, 3),
|
107 |
+
(0, 4, 0, 5, 0, 4, 0, 3, 0, 3, 5, 5, 3, 2, 4, 3, 4, 0, 5, 4, 4, 1, 4, 4, 4, 3, 3, 3, 4, 3, 5, 5, 2, 3, 3, 4, 1, 2, 5, 5, 3, 5, 5, 2, 3, 5, 5, 4, 0, 3, 2, 0, 3, 3, 1, 1, 5, 1, 4, 1, 0, 4, 3, 2, 3, 5, 0, 4, 0, 3, 0, 5, 4, 3, 4, 3, 0, 0, 4, 1, 0, 4, 4),
|
108 |
+
(1, 3, 0, 4, 0, 2, 0, 2, 0, 2, 5, 5, 3, 3, 3, 3, 3, 0, 4, 2, 3, 4, 4, 4, 3, 4, 0, 0, 3, 4, 5, 4, 3, 3, 3, 3, 2, 5, 5, 4, 5, 5, 5, 4, 3, 5, 5, 5, 1, 3, 1, 0, 1, 0, 0, 3, 2, 0, 4, 2, 0, 5, 2, 3, 2, 4, 1, 3, 0, 3, 0, 4, 5, 4, 5, 4, 3, 0, 4, 2, 0, 5, 4),
|
109 |
+
(0, 3, 0, 4, 0, 5, 0, 3, 0, 3, 4, 4, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 4, 3, 3, 2, 2, 0, 3, 3, 3, 3, 3, 1, 3, 3, 3, 0, 4, 4, 3, 4, 4, 1, 1, 4, 4, 2, 0, 3, 1, 0, 1, 1, 0, 4, 1, 0, 2, 3, 1, 3, 3, 1, 3, 4, 0, 3, 0, 1, 0, 3, 1, 3, 0, 0, 1, 0, 2, 0, 0, 4, 4),
|
110 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
111 |
+
(0, 3, 0, 3, 0, 2, 0, 3, 0, 1, 5, 4, 3, 3, 3, 1, 4, 2, 1, 2, 3, 4, 4, 2, 4, 4, 5, 0, 3, 1, 4, 3, 4, 0, 4, 3, 3, 3, 2, 3, 2, 5, 3, 4, 3, 2, 2, 3, 0, 0, 3, 0, 2, 1, 0, 1, 2, 0, 0, 0, 0, 2, 1, 1, 3, 1, 0, 2, 0, 4, 0, 3, 4, 4, 4, 5, 2, 0, 2, 0, 0, 1, 3),
|
112 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 4, 2, 1, 1, 0, 1, 0, 3, 2, 0, 0, 3, 1, 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 4, 0, 4, 2, 1, 0, 0, 0, 0, 0, 1),
|
113 |
+
(0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 2, 0, 2, 1, 0, 0, 1, 2, 1, 0, 1, 1, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2),
|
114 |
+
(0, 4, 0, 4, 0, 4, 0, 3, 0, 4, 4, 3, 4, 2, 4, 3, 2, 0, 4, 4, 4, 3, 5, 3, 5, 3, 3, 2, 4, 2, 4, 3, 4, 3, 1, 4, 0, 2, 3, 4, 4, 4, 3, 3, 3, 4, 4, 4, 3, 4, 1, 3, 4, 3, 2, 1, 2, 1, 3, 3, 3, 4, 4, 3, 3, 5, 0, 4, 0, 3, 0, 4, 3, 3, 3, 2, 1, 0, 3, 0, 0, 3, 3),
|
115 |
+
(0, 4, 0, 3, 0, 3, 0, 3, 0, 3, 5, 5, 3, 3, 3, 3, 4, 3, 4, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 4, 3, 5, 3, 3, 1, 3, 2, 4, 5, 5, 5, 5, 4, 3, 4, 5, 5, 3, 2, 2, 3, 3, 3, 3, 2, 3, 3, 1, 2, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 4, 3, 2, 2, 1, 2, 0, 3, 0, 0, 4, 1),
|
116 |
+
)
|
117 |
+
# fmt: on
|
118 |
+
|
119 |
+
|
120 |
+
class JapaneseContextAnalysis:
    """Score how plausibly a byte stream reads as Japanese text.

    Tallies the frequency categories of adjacent hiragana pairs (looked up
    in ``jp2_char_context``) and converts the tallies into a confidence
    value.  Subclasses provide ``get_order`` for a concrete encoding; this
    base implementation treats every character as unknown.
    """

    NUM_OF_CATEGORY = 6
    DONT_KNOW = -1
    ENOUGH_REL_THRESHOLD = 100
    MAX_REL_THRESHOLD = 1000
    MINIMUM_DATA_THRESHOLD = 4

    def __init__(self) -> None:
        self._total_rel = 0
        self._rel_sample: List[int] = []
        self._need_to_skip_char_num = 0
        self._last_char_order = -1
        self._done = False
        self.reset()

    def reset(self) -> None:
        # Total number of two-character sequences observed so far.
        self._total_rel = 0
        # One counter per frequency category.
        self._rel_sample = [0] * self.NUM_OF_CATEGORY
        # Bytes of a split character to skip at the start of the next buffer.
        self._need_to_skip_char_num = 0
        # Order of the previously seen character (-1 means none/unknown).
        self._last_char_order = -1
        # Set once enough data has been seen; further feeding is a no-op.
        self._done = False

    def feed(self, byte_str: Union[bytes, bytearray], num_bytes: int) -> None:
        """Consume ``num_bytes`` of ``byte_str`` and update pair statistics."""
        if self._done:
            return

        # A character may straddle buffer boundaries.  Rather than stitch
        # the fragments together, we record how many bytes of the split
        # character remain and skip them at the start of the next call --
        # losing a single character does not change the statistics
        # meaningfully and keeps the logic simple and fast.
        pos = self._need_to_skip_char_num
        while pos < num_bytes:
            order, length = self.get_order(byte_str[pos : pos + 2])
            pos += length
            if pos > num_bytes:
                # Character is cut off at the buffer end; remember the
                # overshoot and forget the previous character.
                self._need_to_skip_char_num = pos - num_bytes
                self._last_char_order = -1
                continue  # loop condition now terminates the scan
            if order != -1 and self._last_char_order != -1:
                self._total_rel += 1
                if self._total_rel > self.MAX_REL_THRESHOLD:
                    # Seen plenty; freeze the statistics.
                    self._done = True
                    break
                category = jp2_char_context[self._last_char_order][order]
                self._rel_sample[category] += 1
            self._last_char_order = order

    def got_enough_data(self) -> bool:
        return self._total_rel > self.ENOUGH_REL_THRESHOLD

    def get_confidence(self) -> float:
        # Confidence is the share of observed pairs falling outside the
        # "unseen/rare" category 0 -- a heuristic inherited from Mozilla.
        if self._total_rel <= self.MINIMUM_DATA_THRESHOLD:
            return self.DONT_KNOW
        return (self._total_rel - self._rel_sample[0]) / self._total_rel

    def get_order(self, _: Union[bytes, bytearray]) -> Tuple[int, int]:
        # The base class knows no encoding: everything is "unknown, 1 byte".
        return -1, 1
|
187 |
+
|
188 |
+
|
189 |
+
class SJISContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for Shift_JIS / CP932 byte streams."""

    def __init__(self) -> None:
        super().__init__()
        self._charset_name = "SHIFT_JIS"

    @property
    def charset_name(self) -> str:
        # May be promoted to "CP932" once CP932-only lead bytes are seen.
        return self._charset_name

    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
        """Return ``(hiragana_order, char_byte_length)`` for the next char.

        The order is -1 for anything that is not hiragana.
        """
        if not byte_str:
            return -1, 1

        # Determine the current character's byte length from its lead byte.
        lead = byte_str[0]
        if 0x81 <= lead <= 0x9F or 0xE0 <= lead <= 0xFC:
            char_len = 2
            # These lead bytes occur only in Microsoft's CP932 extension.
            if lead == 0x87 or 0xFA <= lead <= 0xFC:
                self._charset_name = "CP932"
        else:
            char_len = 1

        # Hiragana occupy lead byte 0xCA with trail bytes 0x9F-0xF1.
        if len(byte_str) > 1:
            trail = byte_str[1]
            if lead == 0xCA and 0x9F <= trail <= 0xF1:
                return trail - 0x9F, char_len

        return -1, char_len
|
217 |
+
|
218 |
+
|
219 |
+
class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for EUC-JP byte streams."""

    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
        """Return ``(hiragana_order, char_byte_length)`` for the next char.

        The order is -1 for anything that is not hiragana.
        """
        if not byte_str:
            return -1, 1

        # Determine the current character's byte length from its lead byte.
        lead = byte_str[0]
        if lead == 0x8E or 0xA1 <= lead <= 0xFE:
            char_len = 2  # SS2 prefix or two-byte JIS X 0208 character
        elif lead == 0x8F:
            char_len = 3  # SS3 prefix: three-byte JIS X 0212 character
        else:
            char_len = 1  # single byte

        # Hiragana occupy lead byte 0xA4 with trail bytes 0xA1-0xF3.
        if len(byte_str) > 1:
            trail = byte_str[1]
            if lead == 0xA4 and 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, char_len

        return -1, char_len
|