diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8b5a247c9a43c792566d8e5fecb63d2d9ad361c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/exceptions.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0509672e553c78a1c1552a44c7e679ccec74b758 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/exceptions.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/lazy.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/lazy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1af6e7d244f90526d70cdf8ea128086b91014244 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/lazy.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/reference.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/reference.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94142e9ec2289b9bb47657304342b7ef0913ef04 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/reference.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzfile.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzfile.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d675194146ebf504c0eec482b57016b15dfb325b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzfile.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzinfo.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzinfo.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93e1dbff9d5498afecc0ad5492faebbd474965f1 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/__pycache__/tzinfo.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Aden b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Aden new file mode 100644 index 0000000000000000000000000000000000000000..8c8062471dce91a5be827d6908795ee7391a4afc Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Aden differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Beirut b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Beirut new file mode 100644 index 0000000000000000000000000000000000000000..fb266ede2279b6aff913538d9d5aae3935e53aeb Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Beirut differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Macao b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Macao new file mode 100644 index 0000000000000000000000000000000000000000..cac65063d0dbf48e37c547fba3b67f34110d5a90 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Macao differ diff --git 
a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Novosibirsk b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Novosibirsk new file mode 100644 index 0000000000000000000000000000000000000000..4ac7582ad5ee895bf9b354436142fd7ece1996ee Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Novosibirsk differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Pontianak b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Pontianak new file mode 100644 index 0000000000000000000000000000000000000000..12ce24cbeae404efe6921081d21289be452ff88d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Asia/Pontianak differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Antananarivo b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Antananarivo new file mode 100644 index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Antananarivo differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Chagos b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Chagos new file mode 100644 index 0000000000000000000000000000000000000000..a5554816e2928c2bd5d02e032bbeb1e1cb101009 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Chagos differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Christmas b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Christmas new file mode 100644 index 0000000000000000000000000000000000000000..fa799db39e7625dd74bd9caa5c29b4819a7cbd3f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Christmas differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Cocos b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Cocos new file mode 100644 index 0000000000000000000000000000000000000000..eef37b42e8a0e7179f8113bea01f4a71d668e8ef Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Cocos differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Comoro b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Comoro new file mode 100644 index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Comoro differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Kerguelen b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Kerguelen new file mode 100644 index 0000000000000000000000000000000000000000..555728b1a0187cc0ac63b8fe45c44bd1e0957918 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Kerguelen differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mahe b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mahe new file mode 100644 index 0000000000000000000000000000000000000000..b3ac791aef4e73d6d644c40c614f37f15d462cdd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mahe differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Maldives b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Maldives new file mode 100644 index 
0000000000000000000000000000000000000000..555728b1a0187cc0ac63b8fe45c44bd1e0957918 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Maldives differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mauritius b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mauritius new file mode 100644 index 0000000000000000000000000000000000000000..212d4b2e2afaed06110a1acff4fdb6bd6103b4ff Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mauritius differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mayotte b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mayotte new file mode 100644 index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Mayotte differ diff --git a/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Reunion b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Reunion new file mode 100644 index 0000000000000000000000000000000000000000..b3ac791aef4e73d6d644c40c614f37f15d462cdd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pytz/zoneinfo/Indian/Reunion differ diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/LICENSE.txt b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..9c93e69a0fd11f7b218e1c8b4d6c8a897e6ead51
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/METADATA
@@ -0,0 +1,1124 @@
+Metadata-Version: 2.1
+Name: sacrebleu
+Version: 2.4.2
+Summary: Hassle-free computation of shareable, comparable, and reproducible BLEU, chrF, and TER scores
+Home-page: https://github.com/mjpost/sacrebleu
+Author: Matt Post
+Author-email: post@cs.jhu.edu
+Maintainer-email: post@cs.jhu.edu
+License: Apache License 2.0
+Keywords: machine translation, evaluation, NLP, natural language processing, computational linguistics
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Text Processing
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Typing :: Typed
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE.txt
+Requires-Dist: portalocker
+Requires-Dist: regex
+Requires-Dist: tabulate (>=0.8.9)
+Requires-Dist: numpy (>=1.17)
+Requires-Dist: colorama
+Requires-Dist: lxml
+Provides-Extra: dev
+Requires-Dist: wheel ; extra == 'dev'
+Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: mypy ; extra == 'dev'
+Requires-Dist: types-tabulate ; extra == 'dev'
+Requires-Dist: lxml-stubs ; extra == 'dev'
+Provides-Extra: ja
+Requires-Dist: mecab-python3 (<=1.0.6,>=1.0.5) ; extra == 'ja'
+Requires-Dist: ipadic (<2.0,>=1.0) ; extra == 'ja'
+Provides-Extra: ko
+Requires-Dist: mecab-ko (<=1.0.1,>=1.0.0) ; extra == 'ko'
+Requires-Dist: mecab-ko-dic (<2.0,>=1.0) ; extra == 'ko'
+
+# sacreBLEU
+
+[![PyPI version](https://img.shields.io/pypi/v/sacrebleu)](https://img.shields.io/pypi/v/sacrebleu)
+[![Python version](https://img.shields.io/pypi/pyversions/sacrebleu)](https://img.shields.io/pypi/pyversions/sacrebleu)
+[![GitHub issues](https://img.shields.io/github/issues/mjpost/sacreBLEU.svg)](https://github.com/mjpost/sacrebleu/issues)
+
+SacreBLEU ([Post, 2018](http://aclweb.org/anthology/W18-6319)) provides hassle-free computation of shareable, comparable, and reproducible **BLEU** scores.
+Inspired by Rico Sennrich's `multi-bleu-detok.perl`, it produces the official WMT scores but works with plain text.
+It also knows all the standard test sets and handles downloading, processing, and tokenization for you.
+
+The official version is hosted at <https://github.com/mjpost/sacrebleu>.
+
+# Motivation
+
+Comparing BLEU scores is harder than it should be. Every decoder has its own implementation, often borrowed from Moses, but maybe with subtle changes.
+Moses itself has a number of implementations as standalone scripts, with little indication of how they differ (note: they mostly don't, but `multi-bleu.pl` expects tokenized input). Different flags passed to each of these scripts can produce wide swings in the final score. All of these may handle tokenization in different ways. On top of this, downloading and managing test sets is a moderate annoyance.
+
+Sacre bleu! What a mess.
+
+**SacreBLEU** aims to solve these problems by wrapping the original reference implementation ([Papineni et al., 2002](https://www.aclweb.org/anthology/P02-1040.pdf)) together with other useful features.
+The defaults are set the way that BLEU should be computed, and furthermore, the script outputs a short version string that allows others to know exactly what you did.
+As an added bonus, it automatically downloads and manages test sets for you, so that you can simply tell it to score against `wmt14`, without having to hunt down a path on your local file system.
+It is all designed to take BLEU a little more seriously.
+After all, even with all its problems, BLEU is the default and---admit it---well-loved metric of our entire research community.
+Sacre BLEU.
+
+# Features
+
+- It automatically downloads common WMT test sets and processes them to plain text
+- It produces a short version string that facilitates cross-paper comparisons
+- It properly computes scores on detokenized outputs, using WMT ([Conference on Machine Translation](http://statmt.org/wmt17)) standard tokenization
+- It produces the same values as the official script (`mteval-v13a.pl`) used by WMT
+- It outputs the BLEU score without the comma, so you don't have to remove it with `sed` (Looking at you, `multi-bleu.perl`)
+- It supports different tokenizers for BLEU including support for Japanese and Chinese
+- It supports **chrF, chrF++** and **Translation error rate (TER)** metrics
+- It performs paired bootstrap resampling and paired approximate randomization tests for statistical significance reporting
+
+# Breaking Changes
+
+## v2.0.0
+
+As of v2.0.0, the default output format is changed to `json` for a less painful parsing experience. This means that software that parses the output of sacreBLEU should be modified to either (i) parse the JSON using, for example, the `jq` utility or (ii) pass `-f text` to sacreBLEU to preserve the old textual output. The latter change can also be made **persistent** by exporting `SACREBLEU_FORMAT=text` in relevant shell configuration files.
+
+Here's an example of parsing the `score` key of the JSON output using `jq`:
+
+```
+$ sacrebleu -i output.detok.txt -t wmt17 -l en-de | jq -r .score
+20.8
+```
+
+# Installation
+
+Install the official Python module from PyPI (**Python>=3.6 only**):
+
+    pip install sacrebleu
+
+In order to install Japanese tokenizer support through `mecab-python3`, you need to run the
+following command instead, to perform a full installation with dependencies:
+
+    pip install "sacrebleu[ja]"
+
+In order to install Korean tokenizer support through `mecab-ko`, you need to run the
+following command instead, to perform a full installation with dependencies:
+
+    pip install "sacrebleu[ko]"
+
+# Command-line Usage
+
+You can get a list of available test sets with `sacrebleu --list`. Please see [DATASETS.md](DATASETS.md)
+for an up-to-date list of supported datasets. You can also list available test sets for a given language pair
+with `sacrebleu --list -l en-fr`.
+
+## Basics
+
+### Downloading test sets
+
+Downloading is triggered when you request a test set. If the dataset is not available, it is downloaded
+and unpacked.
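+
+Downloaded and processed files are cached locally. As a minimal sketch of how you might peek at that cache from Python (the `~/.sacrebleu` default location is the one mentioned in the release notes below; the exact file layout is an assumption and may differ across versions):
+
+```python
+from pathlib import Path
+
+# Default sacreBLEU cache directory (assumption: no custom location is configured).
+cache_dir = Path.home() / ".sacrebleu"
+
+if cache_dir.exists():
+    # Print whatever test set files have been downloaded and processed so far.
+    for entry in sorted(cache_dir.iterdir()):
+        print(entry.name)
+```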
+
+E.g., you can use the following commands to download the source, pass it through your translation system
+in `translate.sh`, and then score it:
+
+```
+$ sacrebleu -t wmt17 -l en-de --echo src > wmt17.en-de.en
+$ cat wmt17.en-de.en | translate.sh | sacrebleu -t wmt17 -l en-de
+```
+
+Some test sets also have the outputs of systems that were submitted to the task.
+For example, the `wmt21/systems` test set.
+
+```bash
+$ sacrebleu -t wmt21/systems -l zh-en --echo NiuTrans
+```
+
+This provides a convenient way to score:
+
+```bash
+$ sacrebleu -t wmt21/systems -l zh-en --echo NiuTrans | sacrebleu -t wmt21/systems -l zh-en
+```
+
+You can see a list of the available outputs by passing an invalid value to `--echo`.
+
+### JSON output
+
+As of version `>=2.0.0`, sacreBLEU prints the computed scores in JSON format to make parsing less painful:
+
+```
+$ sacrebleu -i output.detok.txt -t wmt17 -l en-de
+```
+
+```json
+{
+ "name": "BLEU",
+ "score": 20.8,
+ "signature": "nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0",
+ "verbose_score": "54.4/26.6/14.9/8.7 (BP = 1.000 ratio = 1.026 hyp_len = 62880 ref_len = 61287)",
+ "nrefs": "1",
+ "case": "mixed",
+ "eff": "no",
+ "tok": "13a",
+ "smooth": "exp",
+ "version": "2.0.0"
+}
+```
+
+If you want to keep the old behavior, you can pass `-f text` or export `SACREBLEU_FORMAT=text`:
+
+```
+$ sacrebleu -i output.detok.txt -t wmt17 -l en-de -f text
+BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 20.8 54.4/26.6/14.9/8.7 (BP = 1.000 ratio = 1.026 hyp_len = 62880 ref_len = 61287)
+```
+
+### Scoring
+
+(All examples below assume old-style text output for a compact representation that saves space.)
+
+Let's say that you just translated the `en-de` test set of WMT17 with your fancy MT system and the **detokenized** translations are in a file called `output.detok.txt`:
+
+```
+# Option 1: Redirect system output to STDIN
+$ cat output.detok.txt | sacrebleu -t wmt17 -l en-de
+BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 20.8 54.4/26.6/14.9/8.7 (BP = 1.000 ratio = 1.026 hyp_len = 62880 ref_len = 61287)
+
+# Option 2: Use the --input/-i argument
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt
+BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 20.8 54.4/26.6/14.9/8.7 (BP = 1.000 ratio = 1.026 hyp_len = 62880 ref_len = 61287)
+```
+
+You can obtain a short version of the signature with `--short/-sh`:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -sh
+BLEU|#:1|c:mixed|e:no|tok:13a|s:exp|v:2.0.0 = 20.8 54.4/26.6/14.9/8.7 (BP = 1.000 ratio = 1.026 hyp_len = 62880 ref_len = 61287)
+```
+
+If you only want the score to be printed, you can use the `--score-only/-b` flag:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -b
+20.8
+```
+
+The precision of the scores can be configured via the `--width/-w` flag:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -b -w 4
+20.7965
+```
+
+### Using your own reference file
+
+SacreBLEU knows about common test sets (as detailed in the `--list` example above), but you can also use it to score system outputs with arbitrary references. In this case, do not forget to provide **detokenized** reference and hypothesis files:
+
+```
+# Let's save the reference to a text file
+$ sacrebleu -t wmt17 -l en-de --echo ref > ref.detok.txt
+
+# Option 1: Pass the reference file as a positional argument to sacreBLEU
+$ sacrebleu ref.detok.txt -i output.detok.txt -m bleu -b -w 4
+20.7965
+
+# Option 2: Redirect the system into STDIN (compatible with the multi-bleu.perl way of doing things)
+$ cat output.detok.txt | sacrebleu ref.detok.txt -m bleu -b -w 4
+20.7965
+```
+
+### Using multiple metrics
+
+Let's first compute BLEU, chrF and TER with the default settings:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -m bleu chrf ter
+ BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 20.8
+ chrF2|nrefs:1|case:mixed|eff:yes|nc:6|nw:0|space:no|version:2.0.0 = 52.0
+TER|nrefs:1|case:lc|tok:tercom|norm:no|punct:yes|asian:no|version:2.0.0 = 69.0
+```
+
+Let's now enable `chrF++`, which is a revised version of chrF that takes into account word n-grams.
+Observe how the `nw:0` gets changed into `nw:2` in the signature:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -m bleu chrf ter --chrf-word-order 2
+ BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 20.8
+ chrF2++|nrefs:1|case:mixed|eff:yes|nc:6|nw:2|space:no|version:2.0.0 = 49.0
+TER|nrefs:1|case:lc|tok:tercom|norm:no|punct:yes|asian:no|version:2.0.0 = 69.0
+```
+
+Metric-specific arguments are detailed in the output of `--help`:
+
+```
+BLEU related arguments:
+  --smooth-method {none,floor,add-k,exp}, -s {none,floor,add-k,exp}
+                        Smoothing method: exponential decay, floor (increment zero counts), add-k (increment num/denom by k for n>1), or none. (Default: exp)
+  --smooth-value BLEU_SMOOTH_VALUE, -sv BLEU_SMOOTH_VALUE
+                        The smoothing value. Only valid for floor and add-k. (Defaults: floor: 0.1, add-k: 1)
+  --tokenize {none,zh,13a,char,intl,ja-mecab,ko-mecab}, -tok {none,zh,13a,char,intl,ja-mecab,ko-mecab}
+                        Tokenization method to use for BLEU. If not provided, defaults to `zh` for Chinese, `ja-mecab` for Japanese, `ko-mecab` for Korean and `13a` (mteval) otherwise.
+  --lowercase, -lc      If True, enables case-insensitivity. (Default: False)
+  --force               Insist that your tokenized input is actually detokenized.
+
+chrF related arguments:
+  --chrf-char-order CHRF_CHAR_ORDER, -cc CHRF_CHAR_ORDER
+                        Character n-gram order. (Default: 6)
+  --chrf-word-order CHRF_WORD_ORDER, -cw CHRF_WORD_ORDER
+                        Word n-gram order (Default: 0). If equals to 2, the metric is referred to as chrF++.
+  --chrf-beta CHRF_BETA
+                        Determine the importance of recall w.r.t precision. (Default: 2)
+  --chrf-whitespace     Include whitespaces when extracting character n-grams. (Default: False)
+  --chrf-lowercase      Enable case-insensitivity. (Default: False)
+  --chrf-eps-smoothing  Enables epsilon smoothing similar to chrF++.py, NLTK and Moses; instead of effective order smoothing. (Default: False)
+
+TER related arguments (The defaults replicate TERCOM's behavior):
+  --ter-case-sensitive  Enables case sensitivity (Default: False)
+  --ter-asian-support   Enables special treatment of Asian characters (Default: False)
+  --ter-no-punct        Removes punctuation. (Default: False)
+  --ter-normalized      Applies basic normalization and tokenization. (Default: False)
+```
+
+### Version Signatures
+As you may have noticed, sacreBLEU generates version strings such as `BLEU|nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0` for reproducibility reasons. It's strongly recommended to share these signatures in your papers!
+
+### Outputting other metadata
+
+SacreBLEU knows about metadata for some test sets, and you can output it like this:
+
+```
+$ sacrebleu -t wmt21 -l en-de --echo src docid ref | head -n 2
+Couple MACED at California dog park for not wearing face masks while having lunch (VIDEO) - RT USA News rt.com.131279 Paar in Hundepark in Kalifornien mit Pfefferspray besprüht, weil es beim Mittagessen keine Masken trug (VIDEO) - RT USA News
+There's mask-shaming and then there's full on assault. rt.com.131279 Masken-Shaming ist eine Sache, Körperverletzung eine andere.
+```
+
+If multiple fields are requested, they are output as tab-separated columns (a TSV).
+
+To see the available fields, add `--echo asdf` (or some other garbage data):
+
+```
+$ sacrebleu -t wmt21 -l en-de --echo asdf
+sacreBLEU: No such field asdf in test set wmt21 for language pair en-de.
+sacreBLEU: available fields for wmt21/en-de: src, ref:A, ref, docid, origlang
+```
+
+## Translationese Support
+
+If you are interested in the translationese effect, you can evaluate BLEU on a subset of sentences
+with a given original language (identified based on the `origlang` tag in the raw SGM files).
+E.g., to evaluate only against originally German sentences translated to English use:
+
+    $ sacrebleu -t wmt13 -l de-en --origlang=de -i my-wmt13-output.txt
+
+and to evaluate against the complement (in this case, every `origlang` other than de) use:
+
+    $ sacrebleu -t wmt13 -l de-en --origlang=non-de -i my-wmt13-output.txt
+
+**Please note** that the evaluator will return a BLEU score only on the requested subset,
+but it expects that you pass through the entire translated test set.
+
+## Languages & Preprocessing
+
+### BLEU
+
+- You can compute case-insensitive BLEU by passing `--lowercase` to sacreBLEU
+- The default tokenizer for BLEU is `13a`, which mimics the `mteval-v13a` script from Moses.
+- Other tokenizers are:
+  - `none` which will not apply any kind of tokenization at all
+  - `char` for language-agnostic character-level tokenization
+  - `intl` applies international tokenization and mimics the `mteval-v14` script from Moses
+  - `zh` separates out **Chinese** characters and tokenizes the non-Chinese parts using the `13a` tokenizer
+  - `ja-mecab` tokenizes **Japanese** inputs using the [MeCab](https://pypi.org/project/mecab-python3) morphological analyzer
+  - `ko-mecab` tokenizes **Korean** inputs using the [MeCab-ko](https://pypi.org/project/mecab-ko) morphological analyzer
+  - `flores101` and `flores200` use the SentencePiece models built from the Flores-101 and [Flores-200](https://github.com/facebookresearch/flores/blob/main/flores200/README.md#languages-in-flores-200) datasets, respectively. Note: the canonical .spm file will be automatically fetched if not found locally.
+- You can switch tokenizers using the `--tokenize` flag of sacreBLEU. Alternatively, if you provide language-pair strings
+  using `--language-pair/-l`, the `zh`, `ja-mecab` and `ko-mecab` tokenizers will be used if the target language is `zh`, `ja` or `ko`, respectively.
+- **Note that** there's no automatic language detection from the hypotheses, so you need to make sure that you are correctly
+  selecting the tokenizer for **Japanese**, **Korean** and **Chinese**.
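+
+The same tokenizer selection is available when scoring through the Python API (described further below). A minimal sketch, with the caveat that the `tokenize` constructor argument is an assumption mirroring the CLI's `--tokenize` flag, the Japanese sentences are placeholder data, and `ja-mecab` additionally requires the `sacrebleu[ja]` extra:
+
+```python
+from sacrebleu.metrics import BLEU
+
+# Placeholder hypothesis and a single reference stream (not real data).
+hyps = ["猫がマットの上に座った。"]
+refs = [["猫はマットの上に座った。"]]
+
+# Assumption: `tokenize` accepts the same values as the CLI's --tokenize flag.
+bleu = BLEU(tokenize="ja-mecab")
+print(bleu.corpus_score(hyps, refs))
+```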
+
+
+The default `13a` tokenizer will produce poor results for Japanese:
+
+```
+$ sacrebleu kyoto-test.ref.ja -i kyoto-test.hyp.ja -b
+2.1
+```
+
+Let's use the `ja-mecab` tokenizer:
+```
+$ sacrebleu kyoto-test.ref.ja -i kyoto-test.hyp.ja --tokenize ja-mecab -b
+14.5
+```
+
+If you provide the language-pair, sacreBLEU will use `ja-mecab` automatically:
+
+```
+$ sacrebleu kyoto-test.ref.ja -i kyoto-test.hyp.ja -l en-ja -b
+14.5
+```
+
+### chrF / chrF++
+
+chrF applies minimal to no pre-processing as it deals with character n-grams:
+
+- If you pass `--chrf-whitespace`, whitespace characters will be preserved when computing character n-grams.
+- If you pass `--chrf-lowercase`, sacreBLEU will compute case-insensitive chrF.
+- If you enable non-zero `--chrf-word-order` (pass `2` for `chrF++`), a very simple punctuation tokenization will be internally applied.
+
+
+### TER
+
+Translation Error Rate (TER) has its own special tokenizer that you can configure through the command line.
+The defaults are **compatible with the upstream TER implementation (TERCOM)**, but you can nevertheless modify the behavior:
+
+- TER is by default case-insensitive. Pass `--ter-case-sensitive` to enable case sensitivity.
+- Pass `--ter-normalize` to apply a general Western tokenization.
+- Pass `--ter-asian-support` to enable the tokenization of Asian characters. If provided with `--ter-normalize`,
+  both will be applied.
+- Pass `--ter-no-punct` to strip punctuation.
+
+## Multi-reference Evaluation
+
+All three metrics support the use of multiple references during evaluation. Let's first pass all references as positional arguments:
+
+```
+$ sacrebleu ref1 ref2 -i system -m bleu chrf ter
+ BLEU|nrefs:2|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 61.8
+ chrF2|nrefs:2|case:mixed|eff:yes|nc:6|nw:0|space:no|version:2.0.0 = 75.0
+TER|nrefs:2|case:lc|tok:tercom|norm:no|punct:yes|asian:no|version:2.0.0 = 31.2
+```
+
+Alternatively (less recommended), we can concatenate the references into a single file, using tabs as delimiters. Don't forget to pass `--num-refs/-nr` in this case!
+
+```
+$ paste ref1 ref2 > refs.tsv
+
+$ sacrebleu refs.tsv --num-refs 2 -i system -m bleu
+BLEU|nrefs:2|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 = 61.8
+```
+
+## Multi-system Evaluation
+As of version `>=2.0.0`, SacreBLEU supports evaluation of an arbitrary number of systems for a particular
+test set and language-pair. This has the advantage of seeing all results in a
+nicely formatted table.
+
+Let's pass all system output files that match the shell glob `newstest2017.online-*` to sacreBLEU for evaluation:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i newstest2017.online-* -m bleu chrf
+╒═══════════════════════════════╤════════╤═════════╕
+│ System                        │   BLEU │   chrF2 │
+╞═══════════════════════════════╪════════╪═════════╡
+│ newstest2017.online-A.0.en-de │   20.8 │    52.0 │
+├───────────────────────────────┼────────┼─────────┤
+│ newstest2017.online-B.0.en-de │   26.7 │    56.3 │
+├───────────────────────────────┼────────┼─────────┤
+│ newstest2017.online-F.0.en-de │   15.5 │    49.3 │
+├───────────────────────────────┼────────┼─────────┤
+│ newstest2017.online-G.0.en-de │   18.2 │    51.6 │
+╘═══════════════════════════════╧════════╧═════════╛
+
+-----------------
+Metric signatures
+-----------------
+ - BLEU nrefs:1|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0
+ - chrF2 nrefs:1|case:mixed|eff:yes|nc:6|nw:0|space:no|version:2.0.0
+```
+
+You can also change the output format to `latex`:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i newstest2017.online-* -m bleu chrf -f latex
+\begin{tabular}{rcc}
+\toprule
+ System & BLEU & chrF2 \\
+\midrule
+ newstest2017.online-A.0.en-de & 20.8 & 52.0 \\
+ newstest2017.online-B.0.en-de & 26.7 & 56.3 \\
+ newstest2017.online-F.0.en-de & 15.5 & 49.3 \\
+ newstest2017.online-G.0.en-de & 18.2 & 51.6 \\
+\bottomrule
+\end{tabular}
+
+...
+```
+
+## Confidence Intervals for Single System Evaluation
+
+When enabled with the `--confidence` flag, SacreBLEU will print
+(1) the actual system score, (2) the true mean estimated from bootstrap resampling, and (3)
+the 95% [confidence interval](https://en.wikipedia.org/wiki/Confidence_interval) around the mean.
+By default, the number of bootstrap resamples is 1000 (`bs:1000` in the signature)
+and can be changed with `--confidence-n`:
+
+```
+$ sacrebleu -t wmt17 -l en-de -i output.detok.txt -m bleu chrf --confidence -f text --short
+ BLEU|#:1|bs:1000|rs:12345|c:mixed|e:no|tok:13a|s:exp|v:2.0.0 = 22.675 (μ = 22.669 ± 0.598) ...
+chrF2|#:1|bs:1000|rs:12345|c:mixed|e:yes|nc:6|nw:0|s:no|v:2.0.0 = 51.953 (μ = 51.953 ± 0.462)
+```
+
+**NOTE:** Although provided as a feature, having access to confidence intervals for just one system
+may not reveal much information about the underlying model. It often makes more sense to perform
+**paired statistical tests** across multiple systems.
+
+**NOTE:** When resampling, the seed of `numpy`'s random number generator (RNG)
+is fixed to `12345`. If you want to relax this and set your own seed, you can
+export the environment variable `SACREBLEU_SEED` with an integer value. Alternatively, you can export
+`SACREBLEU_SEED=None` to skip initializing the RNG's seed and allow for non-deterministic
+behavior.
+
+## Paired Significance Tests for Multi System Evaluation
+Ideally, one would have access to many systems in cases such as (1) investigating
+whether a newly added feature yields significantly different scores than the baseline or
+(2) evaluating submissions for a particular shared task. SacreBLEU offers two different paired significance tests that are widely used in MT research.
+
+### Paired bootstrap resampling (--paired-bs)
+
+This is an efficient implementation of the paper [Statistical Significance Tests for Machine Translation Evaluation](https://www.aclweb.org/anthology/W04-3250.pdf) and is result-compliant with the [reference Moses implementation](https://github.com/moses-smt/mosesdecoder/blob/master/scripts/analysis/bootstrap-hypothesis-difference-significance.pl). The number of bootstrap resamples can be changed with the `--paired-bs-n` flag and its default is 1000.
+
+When launched, paired bootstrap resampling will perform:
+ - Bootstrap resampling to estimate the 95% CI for all systems and the baseline
+ - A significance test between the **baseline** and each **system** to compute a [p-value](https://en.wikipedia.org/wiki/P-value).
+
+### Paired approximate randomization (--paired-ar)
+
+Paired approximate randomization (AR) is another type of paired significance test that is claimed to be more accurate than paired bootstrap resampling when it comes to Type-I errors ([Riezler and Maxwell III, 2005](https://www.aclweb.org/anthology/W05-0908.pdf)). A Type-I error is the incorrect rejection of the null hypothesis when it is in fact true (a false positive). In other words, AR should in theory be more robust to subtle changes across systems.
+
+Our implementation is verified to be result-compliant with the [Multeval toolkit](https://github.com/jhclark/multeval), which also uses the paired AR test for pairwise comparison. The number of approximate randomization trials is set to 10,000 by default. This can be changed with the `--paired-ar-n` flag.
+
+### Running the tests
+
+- The **first system** provided to `--input/-i` will be automatically taken as the **baseline system** against which you want to compare the **other systems**.
+- When `--input/-i` is used, the system output files will be automatically named according to the file paths. For the sake of simplicity, SacreBLEU will automatically discard the **baseline system** if it also appears amongst the **other systems**. This is useful if you would like to run the tool by passing `-i systems/baseline.txt systems/*.txt`. Here, the `baseline.txt` file will not also be considered a candidate system.
+- Alternatively, you can also use a tab-separated input file redirected to SacreBLEU. In this case, the hypotheses in the first column will be taken as the **baseline system**. However, this method is **not recommended** as it won't allow naming your systems in a human-readable way. It will instead enumerate the systems from 1 to N following the column order in the tab-separated input.
+- On Linux and Mac OS X, you can launch the tests on multiple CPUs by passing the flag `--paired-jobs N`. If `N == 0`, SacreBLEU will launch one worker for each pairwise comparison. If `N > 0`, `N` worker processes will be spawned. This feature will substantially speed up the runtime, especially if you want the **TER** metric to be computed.
+
+#### Example: Paired bootstrap resampling
+In the example below, we select `newstest2017.LIUM-NMT.4900.en-de` as the baseline and compare it to 4 other WMT17 submissions using paired bootstrap resampling. According to the results, the null hypothesis (i.e.
the two systems being essentially the same) could not be rejected (at the significance level of 0.05) for the following comparisons: + +- 0.1 BLEU difference between the baseline and the online-B system (p = 0.3077) + +``` +$ sacrebleu -t wmt17 -l en-de -i newstest2017.LIUM-NMT.4900.en-de newstest2017.online-* -m bleu chrf --paired-bs +╒════════════════════════════════════════════╤═════════════════════╤══════════════════════╕ +│ System │ BLEU (μ ± 95% CI) │ chrF2 (μ ± 95% CI) │ +╞════════════════════════════════════════════╪═════════════════════╪══════════════════════╡ +│ Baseline: newstest2017.LIUM-NMT.4900.en-de │ 26.6 (26.6 ± 0.6) │ 55.9 (55.9 ± 0.5) │ +├────────────────────────────────────────────┼─────────────────────┼──────────────────────┤ +│ newstest2017.online-A.0.en-de │ 20.8 (20.8 ± 0.6) │ 52.0 (52.0 ± 0.4) │ +│ │ (p = 0.0010)* │ (p = 0.0010)* │ +├────────────────────────────────────────────┼─────────────────────┼──────────────────────┤ +│ newstest2017.online-B.0.en-de │ 26.7 (26.6 ± 0.7) │ 56.3 (56.3 ± 0.5) │ +│ │ (p = 0.3077) │ (p = 0.0240)* │ +├────────────────────────────────────────────┼─────────────────────┼──────────────────────┤ +│ newstest2017.online-F.0.en-de │ 15.5 (15.4 ± 0.5) │ 49.3 (49.3 ± 0.4) │ +│ │ (p = 0.0010)* │ (p = 0.0010)* │ +├────────────────────────────────────────────┼─────────────────────┼──────────────────────┤ +│ newstest2017.online-G.0.en-de │ 18.2 (18.2 ± 0.5) │ 51.6 (51.6 ± 0.4) │ +│ │ (p = 0.0010)* │ (p = 0.0010)* │ +╘════════════════════════════════════════════╧═════════════════════╧══════════════════════╛ + +------------------------------------------------------------ +Paired bootstrap resampling test with 1000 resampling trials +------------------------------------------------------------ + - Each system is pairwise compared to Baseline: newstest2017.LIUM-NMT.4900.en-de. + Actual system score / bootstrap estimated true mean / 95% CI are provided for each metric. + + - Null hypothesis: the system and the baseline translations are essentially + generated by the same underlying process. For a given system and the baseline, + the p-value is roughly the probability of the absolute score difference (delta) + or higher occurring due to chance, under the assumption that the null hypothesis is correct. + + - Assuming a significance threshold of 0.05, the null hypothesis can be rejected + for p-values < 0.05 (marked with "*"). This means that the delta is unlikely to be attributed + to chance, hence the system is significantly "different" than the baseline. + Otherwise, the p-values are highlighted in red. + + - NOTE: Significance does not tell whether a system is "better" than the baseline but rather + emphasizes the "difference" of the systems in terms of the replicability of the delta. + +----------------- +Metric signatures +----------------- + - BLEU nrefs:1|bs:1000|seed:12345|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 + - chrF2 nrefs:1|bs:1000|seed:12345|case:mixed|eff:yes|nc:6|nw:0|space:no|version:2.0.0 +``` + +#### Example: Paired approximate randomization + +Let's now run the paired approximate randomization test for the same comparison. According to the results, the findings are compatible with the paired bootstrap resampling test. However, the p-value for the `baseline vs. online-B` comparison is much higher (`0.8066`) than the paired bootstrap resampling test. + +(**Note that** the AR test does not provide confidence intervals around the true mean as it does not perform bootstrap resampling.) 
+ +``` +$ sacrebleu -t wmt17 -l en-de -i newstest2017.LIUM-NMT.4900.en-de newstest2017.online-* -m bleu chrf --paired-ar +╒════════════════════════════════════════════╤═══════════════╤═══════════════╕ +│ System │ BLEU │ chrF2 │ +╞════════════════════════════════════════════╪═══════════════╪═══════════════╡ +│ Baseline: newstest2017.LIUM-NMT.4900.en-de │ 26.6 │ 55.9 │ +├────────────────────────────────────────────┼───────────────┼───────────────┤ +│ newstest2017.online-A.0.en-de │ 20.8 │ 52.0 │ +│ │ (p = 0.0001)* │ (p = 0.0001)* │ +├────────────────────────────────────────────┼───────────────┼───────────────┤ +│ newstest2017.online-B.0.en-de │ 26.7 │ 56.3 │ +│ │ (p = 0.8066) │ (p = 0.0385)* │ +├────────────────────────────────────────────┼───────────────┼───────────────┤ +│ newstest2017.online-F.0.en-de │ 15.5 │ 49.3 │ +│ │ (p = 0.0001)* │ (p = 0.0001)* │ +├────────────────────────────────────────────┼───────────────┼───────────────┤ +│ newstest2017.online-G.0.en-de │ 18.2 │ 51.6 │ +│ │ (p = 0.0001)* │ (p = 0.0001)* │ +╘════════════════════════════════════════════╧═══════════════╧═══════════════╛ + +------------------------------------------------------- +Paired approximate randomization test with 10000 trials +------------------------------------------------------- + - Each system is pairwise compared to Baseline: newstest2017.LIUM-NMT.4900.en-de. + Actual system score is provided for each metric. + + - Null hypothesis: the system and the baseline translations are essentially + generated by the same underlying process. For a given system and the baseline, + the p-value is roughly the probability of the absolute score difference (delta) + or higher occurring due to chance, under the assumption that the null hypothesis is correct. + + - Assuming a significance threshold of 0.05, the null hypothesis can be rejected + for p-values < 0.05 (marked with "*"). This means that the delta is unlikely to be attributed + to chance, hence the system is significantly "different" than the baseline. + Otherwise, the p-values are highlighted in red. + + - NOTE: Significance does not tell whether a system is "better" than the baseline but rather + emphasizes the "difference" of the systems in terms of the replicability of the delta. + +----------------- +Metric signatures +----------------- + - BLEU nrefs:1|ar:10000|seed:12345|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0 + - chrF2 nrefs:1|ar:10000|seed:12345|case:mixed|eff:yes|nc:6|nw:0|space:no|version:2.0.0 +``` + +# Using SacreBLEU from Python + +For evaluation, it may be useful to compute BLEU, chrF or TER from a Python script. 
The recommended way of doing this is to use the object-oriented API, for example by creating an instance of the `metrics.BLEU` class:
+
+```python
+In [1]: from sacrebleu.metrics import BLEU, CHRF, TER
+   ...:
+   ...: refs = [ # First set of references
+   ...:     ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],
+   ...:     # Second set of references
+   ...:     ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],
+   ...: ]
+   ...: sys = ['The dog bit the man.', "It wasn't surprising.", 'The man had just bitten him.']
+
+In [2]: bleu = BLEU()
+
+In [3]: bleu.corpus_score(sys, refs)
+Out[3]: BLEU = 48.53 82.4/50.0/45.5/37.5 (BP = 0.943 ratio = 0.944 hyp_len = 17 ref_len = 18)
+
+In [4]: bleu.get_signature()
+Out[4]: nrefs:2|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0
+
+In [5]: chrf = CHRF()
+
+In [6]: chrf.corpus_score(sys, refs)
+Out[6]: chrF2 = 59.73
+```
+
+### Variable Number of References
+
+Let's now remove the first reference sentence for the first system sentence `The dog bit the man.` by replacing it with either `None` or the empty string `''`.
+This allows using a variable number of reference segments per hypothesis. Observe how the signature changes from `nrefs:2` to `nrefs:var`:
+
+```python
+In [1]: from sacrebleu.metrics import BLEU, CHRF, TER
+   ...:
+   ...: refs = [ # First set of references
+   ...:     # 1st sentence does not have a ref here
+   ...:     ['', 'It was not unexpected.', 'The man bit him first.'],
+   ...:     # Second set of references
+   ...:     ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],
+   ...: ]
+   ...: sys = ['The dog bit the man.', "It wasn't surprising.", 'The man had just bitten him.']
+
+In [2]: bleu = BLEU()
+
+In [3]: bleu.corpus_score(sys, refs)
+Out[3]: BLEU = 29.44 82.4/42.9/27.3/12.5 (BP = 0.889 ratio = 0.895 hyp_len = 17 ref_len = 19)
+
+In [4]: bleu.get_signature()
+Out[4]: nrefs:var|case:mixed|eff:no|tok:13a|smooth:exp|version:2.0.0
+```
+
+## Compatibility API
+
+You can also use the compatibility API that provides wrapper functions around the object-oriented API to
+compute sentence-level and corpus-level BLEU, chrF and TER (note that this API may be removed in future releases):
+
+```python
+In [1]: import sacrebleu
+   ...:
+   ...: refs = [ # First set of references
+   ...:     ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],
+   ...:     # Second set of references
+   ...:     ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],
+   ...: ]
+   ...: sys = ['The dog bit the man.', "It wasn't surprising.", 'The man had just bitten him.']
+
+In [2]: sacrebleu.corpus_bleu(sys, refs)
+Out[2]: BLEU = 48.53 82.4/50.0/45.5/37.5 (BP = 0.943 ratio = 0.944 hyp_len = 17 ref_len = 18)
+```
+
+# License
+
+SacreBLEU is licensed under the [Apache 2.0 License](LICENSE.txt).
+
+# Credits
+
+This was all [Rico Sennrich's idea](https://twitter.com/RicoSennrich/status/883246242763026433).
+Originally written by Matt Post.
+New features and ongoing support provided by Martin Popel (@martinpopel) and Ozan Caglayan (@ozancaglayan).
+
+If you use SacreBLEU, please cite the following:
+
+```
+@inproceedings{post-2018-call,
+  title = "A Call for Clarity in Reporting {BLEU} Scores",
+  author = "Post, Matt",
+  booktitle = "Proceedings of the Third Conference on Machine Translation: Research Papers",
+  month = oct,
+  year = "2018",
+  address = "Belgium, Brussels",
+  publisher = "Association for Computational Linguistics",
+  url = "https://www.aclweb.org/anthology/W18-6319",
+  pages = "186--191",
+}
+```
+
+# Release Notes
+
+- 2.4.2 (2024-04-12)
+  Added:
+  - The CLI "--echo" will now return the domain for WMT22 and WMT23.
+
+  Fixed:
+  - Default reference for wmt23:en-de
+
+- 2.4.1 (2024-03-12)
+  Fixed:
+  - Add exports to package __init__.py
+
+- 2.4.0 (2023-12-11)
+  Added:
+  - WMT23 test sets (test set `wmt23`)
+
+- 2.3.3 (2023-11-28)
+  Fixed:
+  - Typing issues (#249, #250)
+  - Improved builds (#252)
+
+- 2.3.2 (2023-11-06)
+  Fixed:
+  - Special treatment of empty references in TER (#232)
+  - Bump in mecab version for JA (#234)
+
+  Added:
+  - Warning if `-tok spm` is used (use explicit `flores101` instead) (#238)
+
+- 2.3.1 (2022-10-18)
+  Bugfix:
+  - Set lru_cache to 2^16 for SPM tokenizer (was set to infinite)
+
+- 2.3.0 (2022-10-18)
+  Features:
+  - (#203) Added `-tok flores101` and `-tok flores200`, a.k.a. `spbleu`.
+    These are multilingual tokenizations that make use of the
+    multilingual SPM models released by Facebook and described in the
+    following papers:
+    * Flores-101: https://arxiv.org/abs/2106.03193
+    * Flores-200: https://arxiv.org/abs/2207.04672
+  - (#213) Added JSON formatting for multi-system output (thanks to Manikanta Inugurthi @me-manikanta)
+  - (#211) You can now list all test sets for a language pair with `--list SRC-TRG`.
+    Thanks to Jaume Zaragoza (@ZJaume) for adding this feature.
+  - Added WMT22 test sets (test set `wmt22`)
+  - System outputs: included with wmt22. Also added wmt21/systems which will produce WMT21 submitted systems.
+    To see available systems, give a dummy system to `--echo`, e.g., `sacrebleu -t wmt22 -l en-de --echo ?`
+
+- 2.2.1 (2022-09-13)
+  Bugfix: Standard usage was returning (and using) each reference twice.
+
+- 2.2.0 (2022-07-25)
+  Features:
+  - Added WMT21 datasets (thanks to @BrightXiaoHan)
+  - `--echo` now exposes document metadata where available (e.g., docid, genre, origlang)
+  - Bugfix: allow empty references (#161)
+  - Adds a Korean tokenizer (thanks to @NoUnique)
+
+  Under the hood:
+  - Moderate code refactoring
+  - Processed files have adopted a more sensible internal naming scheme under ~/.sacrebleu
+    (e.g., wmt17_ms.zh-en.src instead of zh-en.zh)
+  - Processed file extensions correspond to the values passed to `--echo` (e.g., "src")
+  - Now explicitly representing NoneTokenizer
+  - Got rid of the ".lock" lockfile for downloading (using the tarball itself)
+
+  Many thanks to @BrightXiaoHan (https://github.com/BrightXiaoHan) for the bulk of
+  the code contributions in this release.
+
+- 2.1.0 (2022-05-19)
+  Features:
+  - Added `-tok spm` for multilingual SPM tokenization (#168)
+    (thanks to Naman Goyal and James Cross at Facebook)
+
+  Fixes:
+  - Handle potential memory usage issues due to LRU caching in tokenizers (#167)
+  - Bugfix: BLEU.corpus_score() now using max_ngram_order (#173)
+  - Upgraded ja-mecab to 1.0.5 (#196)
+
+- 2.0.0 (2021-07-18)
+  - Build: Add Windows and OS X testing to Travis CI.
+  - Improve documentation and type annotations.
+  - Drop `Python < 3.6` support and migrate to f-strings.
+ - Relax `portalocker` version pinning, add `regex, tabulate, numpy` dependencies.
+ - Drop input type manipulation through `isinstance` checks. If the user does not obey
+ the expected annotations, exceptions will be raised. Robustness attempts led to
+ confusion and obfuscated score errors in the past (#121)
+ - Variable # references per segment is supported for all metrics by default. It is
+ still only available through the API.
+ - Use colored strings in tabular outputs (multi-system evaluation mode) with
+ the help of the `colorama` package.
+ - tokenizers: Add caching to tokenizers, which seems to speed things up a bit.
+ - `intl` tokenizer: Use `regex` module. Speed goes from ~4 seconds to ~0.6 seconds
+ for a particular test set evaluation. (#46)
+ - Signature: Formatting changed (mostly to remove '+' separator as it was
+ interfering with chrF++). The field separator is now '|' and key values
+ are separated with ':' rather than '.'.
+ - Signature: Boolean true / false values are shortened to yes / no.
+ - Signature: Number of references is `var` if variable number of references is used.
+ - Signature: Add effective order (yes/no) to BLEU and chrF signatures.
+ - Metrics: Scale all metrics into the [0, 100] range (#140)
+ - Metrics API: Use explicit argument names and defaults for the metrics instead of
+ passing obscure `argparse.Namespace` objects.
+ - Metrics API: A base abstract `Metric` class is introduced to guide further
+ metric development. This class defines the methods that should be implemented
+ in the derived classes and offers boilerplate methods for the common functionality.
+ A new metric implemented this way will automatically support significance testing.
+ - Metrics API: All metrics now receive an optional `references` argument at
+ initialization time to process and cache the references. Further evaluations
+ of different systems against the same references become faster this way,
+ for example when using significance testing.
+ - BLEU: In case of no n-gram matches at all, skip smoothing and return 0.0 BLEU (#141).
+ - CHRF: Added multi-reference support, verified the scores against chrF++.py, added test case.
+ - CHRF: Added chrF+ support through `word_order` argument. Added test cases against chrF++.py.
+ Exposed it through the CLI (--chrf-word-order) (#124)
+ - CHRF: Add possibility to disable effective order smoothing (pass --chrf-eps-smoothing).
+ This way, the scores obtained are exactly the same as chrF++, Moses and NLTK implementations.
+ We keep the effective ordering as the default for compatibility, since this only
+ affects sentence-level scoring with very short sentences. (#144)
+ - CLI: `--input/-i` can now ingest multiple systems. For this reason, the positional
+ `references` should always precede the `-i` flag.
+ - CLI: Allow modifying TER arguments through CLI. We still keep the TERCOM defaults.
+ - CLI: Prefix metric-specific arguments with --chrf and --ter. To maintain compatibility,
+ BLEU argument names are kept the same.
+ - CLI: Separate metric-specific arguments for clarity when `--help` is printed.
+ - CLI: Added `--format/-f` flag. The single-system output mode is now `json` by default.
+ If you want to keep the old text format persistently, you can export `SACREBLEU_FORMAT=text` into your
+ shell.
+ - CLI: For multi-system mode, `json` falls back to plain text. `latex` output can only
+ be generated for multi-system mode.
+ - CLI: sacreBLEU now supports evaluating multiple systems for a given test set
+ in an efficient way.
Through the use of the `tabulate` package, the results are
+ nicely rendered into a plain text table, LaTeX, HTML or RST (cf. --format/-f argument).
+ The systems can be either given as a list of plain text files to `-i/--input` or
+ as a tab-separated single stream redirected into `STDIN`. In the former case,
+ the basenames of the files will be automatically used as system names.
+ - Statistical tests: sacreBLEU now supports confidence interval estimation
+ through bootstrap resampling for single-system evaluation (`--confidence` flag)
+ as well as paired bootstrap resampling (`--paired-bs`) and paired approximate
+ randomization tests (`--paired-ar`) when evaluating multiple systems (#40 and #78).
+
+- 1.5.1 (2021-03-05)
+ - Fix extraction error for WMT18 extra test sets (test-ts) (#142)
+ - Validation and test datasets are added for multilingual TEDx
+
+- 1.5.0 (2021-01-15)
+ - Fix an assertion error in chrF (#121)
+ - Add missing `__repr__()` methods for BLEU and TER
+ - TER: Fix exception when `--short` is used (#131)
+ - Pin Mecab version to 1.0.3 for Python 3.5 support
+ - [API Change]: Default value for `floor` smoothing is now 0.1 instead of 0.
+ - [API Change]: `sacrebleu.sentence_bleu()` now uses the `exp` smoothing method,
+ exactly the same as the CLI's --sentence-level behavior. This was mainly done
+ to make the two methods behave the same.
+ - Add smoothing value to BLEU signature (#98)
+ - dataset: Fix IWSLT links (#128)
+ - Allow variable number of references for BLEU (only via API) (#130).
+ Thanks to Ondrej Dusek (@tuetschek)
+
+- 1.4.14 (2020-09-13)
+ - Added character-based tokenization (`-tok char`).
+ Thanks to Christian Federmann.
+ - Added TER (`-m ter`). Thanks to Ales Tamchyna! (fixes #90)
+ - Allow calling the script as a standalone utility (fixes #86)
+ - Fix type annotation issues (fixes #100) and mark sacrebleu as supporting mypy
+ - Added WMT20 robustness test sets:
+ - wmt20/robust/set1 (en-ja, en-de)
+ - wmt20/robust/set2 (en-ja, ja-en)
+ - wmt20/robust/set3 (de-en)
+
+- 1.4.13 (2020-07-30)
+ - Added WMT20 newstest test sets (#103)
+ - Make mecab3-python an extra dependency, adapt code to new mecab3-python
+ This fixes the recent Windows installation issues as well (#104)
+ Japanese support should now be explicitly installed through the sacrebleu[ja] package.
+ - Fix return type annotation of corpus_bleu()
+ - Improve sentence_score's documentation, do not allow single ref string (#98)
+
+- 1.4.12 (2020-07-03)
+ - Fix a deployment bug (#96)
+
+- 1.4.11 (2020-07-03)
+ - Added Multi30k multimodal MT test set metadata
+ - Refactored all tokenizers into respective classes (fixes #85)
+ - Refactored all metrics into respective classes
+ - Moved utility functions into `utils.py`
+ - Implemented signatures using `BLEUSignature` and `CHRFSignature` classes
+ - Simplified checking of Chinese characters (fixes #5)
+ - Unified common regexp tokenization codes for tokenizers (fixes #27)
+ - Fixed --detail failing when no test sets are provided
+ - Fixed multi-reference BLEU failing when tab-delimited reference stream is used
+ - Removed lowercase option for ChrF which was not functional (#85)
+ - Simplified ChrF and used the same I/O logic as BLEU to allow for future
+ multi-reference reading
+ - Added score regression tests for chrF using reference chrF++ implementation
+ - Added multi-reference & tokenizer & signature tests
+
+- 1.4.10 (2020-05-30)
+ - Fixed bug in signature with mecab tokenizer
+ - Cleaned up deprecation warnings (thanks to Karthikeyan Singaravelan @tirkarthi)
+ - Now only lists the external [typing](https://pypi.org/project/typing/)
+ module as a dependency for Python `<= 3.4`, as it was integrated in the standard
+ library in Python 3.5 (thanks to Erwan de Lépinau @ErwanDL).
+ - Added LICENSE to pypi (thanks to Mark Harfouche @hmaarrfk)
+
+- 1.4.9 (2020-04-30)
+ - Changed `get_available_testsets()` to return a list
+ - Remove Japanese MeCab tokenizer from requirements.
+ (Must be installed manually to avoid Windows incompatibility).
+ Many thanks to Makoto Morishita (@MorinoseiMorizo).
+
+- 1.4.8 (2020-04-26)
+ - Added to API:
+ - get_source_file()
+ - get_reference_files()
+ - get_available_testsets()
+ - get_langpairs_for_testset()
+ - Some internal refactoring
+ - Fixed descriptions of some WMT19/google test sets
+ - Added API test case (test/test_apy.py)
+
+- 1.4.7 (2020-04-19)
+ - Added Google's extra wmt19/en-de refs (-t wmt19/google/{ar,arp,hqall,hqp,hqr,wmtp})
+ (Freitag, Grangier, & Caswell
+ BLEU might be Guilty but References are not Innocent
+ https://arxiv.org/abs/2004.06063)
+ - Restored SACREBLEU_DIR and smart_open to exports (thanks to Thomas Liao @tholiao)
+
+- 1.4.6 (2020-03-28)
+ - Large internal reorganization as a module (thanks to Thamme Gowda @thammegowda)
+
+- 1.4.5 (2020-03-28)
+ - Added Japanese MeCab tokenizer (`-tok ja-mecab`) (thanks to Makoto Morishita @MorinoseiMorizo)
+ - Added wmt20/dev test sets (thanks to Martin Popel @martinpopel)
+
+- 1.4.4 (2020-03-10)
+ - Smoothing changes (Sebastian Nickels @sn1c)
+ - Fixed bug that only applied smoothing to n-grams for n > 2
+ - Added default smoothing values for methods "floor" (0) and "add-k" (1)
+ - `--list` now returns a list of all language pairs for a task when combined with `-t`
+ (e.g., `sacrebleu -t wmt19 --list`)
+ - added missing languages for IWSLT17
+ - Minor code improvements (Thomas Liao @tholiao)
+
+- 1.4.3 (2019-12-02)
+ - Bugfix: handling of result object for CHRF
+ - Improved API example
+
+- 1.4.2 (2019-10-11)
+ - Tokenization variant omitted from the chrF signature; it is relevant only for BLEU (thanks to Martin Popel)
+ - Bugfix: call to sentence_bleu (thanks to Rachel Bawden)
+ - Documentation example for Python API (thanks to Vlad Lyalin)
+ - Calls to corpus_chrf and sentence_chrf now return an object instead of a float (use
result.score)
+
+- 1.4.1 (2019-09-11)
+ - Added sentence-level scoring via -sl (--sentence-level)
+
+- 1.4.0 (2019-09-10)
+ - Many thanks to Martin Popel for all the changes below!
+ - Added evaluation on concatenated test sets (e.g., `-t wmt17,wmt18`).
+ Works as long as they all have the same language pair.
+ - Added `sacrebleu --origlang` (both for evaluation on a subset and for `--echo`).
+ Note that while echoing prints just the subset, evaluation expects the complete
+ test set (and just skips the irrelevant parts).
+ - Added `sacrebleu --detail` for breakdown by domain-specific subsets of the test sets.
+ (Available for WMT19).
+ - Minor changes
+ - Improved display of `sacrebleu -h`
+ - Added `sacrebleu --list`
+ - Code refactoring
+ - Documentation and tests updates
+ - Fixed a race condition bug (`os.makedirs(outdir, exist_ok=True)` instead of `if os.path.exists`)
+
+- 1.3.7 (2019-07-12)
+ - Lazy loading of regexes cuts import time from ~1s to nearly nothing (thanks, @louismartin!)
+ - Added a simple (non-atomic) lock on downloading
+ - Can now read multiple refs from a single tab-delimited file.
+ You need to pass `--num-refs N` to tell it to run the split.
+ Only works with a single reference file passed from the command line.
+
+- 1.3.6 (2019-06-10)
+ - Removed another f-string for Python 3.5 compatibility
+
+- 1.3.5 (2019-06-07)
+ - Restored Python 3.5 compatibility
+
+- 1.3.4 (2019-05-28)
+ - Added MTNT 2019 test sets
+ - Added a BLEU object
+
+- 1.3.3 (2019-05-08)
+ - Added WMT'19 test sets
+
+- 1.3.2 (2019-04-24)
+ - Bugfix in test case (thanks to Adam Roberts, @adarob)
+ - Passing smoothing method through `sentence_bleu`
+
+- 1.3.1 (2019-03-20)
+ - Added another smoothing approach (add-k) and a command-line option for choosing the smoothing method
+ (`--smooth exp|floor|add-k|none`) and the associated value (`--smooth-value`), when relevant.
+ - Changed interface to some functions (backwards incompatible)
+ - 'smooth' is now 'smooth_method'
+ - 'smooth_floor' is now 'smooth_value'
+
+- 1.2.21 (19 March 2019)
+ - Ctrl-M characters are now treated as normal characters, previously treated as newline.
+
+- 1.2.20 (28 February 2019)
+ - Tokenization now defaults to "zh" when language pair is known
+
+- 1.2.19 (19 February 2019)
+ - Updated checksum for wmt19/dev (seems to have changed)
+
+- 1.2.18 (19 February 2019)
+ - Fixed checksum for wmt17/dev (copy-paste error)
+
+- 1.2.17 (6 February 2019)
+ - Added kk-en and en-kk to wmt19/dev
+
+- 1.2.16 (4 February 2019)
+ - Added gu-en and en-gu to wmt19/dev
+
+- 1.2.15 (30 January 2019)
+ - Added MD5 checksumming of downloaded files for all datasets.
+
+- 1.2.14 (22 January 2019)
+ - Added mtnt1.1/train mtnt1.1/valid mtnt1.1/test data from [MTNT](http://www.cs.cmu.edu/~pmichel1/mtnt/)
+
+- 1.2.13 (22 January 2019)
+ - Added 'wmt19/dev' task for 'lt-en' and 'en-lt' (development data for new tasks).
+ - Added MD5 checksum for downloaded tarballs.
+ +- 1.2.12 (8 November 2018)
+ - Now outputs only one digit after the decimal
+
+- 1.2.11 (29 August 2018)
+ - Added a function for sentence-level, smoothed BLEU
+
+- 1.2.10 (23 May 2018)
+ - Added wmt18 test set (with references)
+
+- 1.2.9 (15 May 2018)
+ - Added zh-en, en-zh, tr-en, and en-tr datasets for wmt18/test-ts
+
+- 1.2.8 (14 May 2018)
+ - Added wmt18/test-ts, the test sources (only) for [WMT18](http://statmt.org/wmt18/translation-task.html)
+ - Moved README out of `sacrebleu.py` and the CHANGELOG into a separate file
+
+- 1.2.7 (10 April 2018)
+ - fixed another locale issue (with --echo)
+ - grudgingly enabled `-tok none` from the command line
+
+- 1.2.6 (22 March 2018)
+ - added wmt17/ms (Microsoft's [additional ZH-EN references](https://github.com/MicrosoftTranslator/Translator-HumanParityData)).
+ Try `sacrebleu -t wmt17/ms --cite`.
+ - `--echo ref` now pastes together all references, if there is more than one
+
+- 1.2.5 (13 March 2018)
+ - added wmt18/dev datasets (en-et and et-en)
+ - fixed logic with --force
+ - locale-independent installation
+ - added "--echo both" (tab-delimited)
+
+- 1.2.3 (28 January 2018)
+ - metrics (`-m`) are now printed in the order requested
+ - chrF now prints a version string (including the beta parameter, importantly)
+ - attempt to remove dependence on locale setting
+
+- 1.2 (17 January 2018)
+ - added the chrF metric (`-m chrf` or `-m bleu chrf` for both)
+ See 'CHRF: character n-gram F-score for automatic MT evaluation' by Maja Popovic (WMT 2015)
+ [http://www.statmt.org/wmt15/pdf/WMT49.pdf]
+ - added IWSLT 2017 test and tuning sets for DE, FR, and ZH
+ (Thanks to Mauro Cettolo and Marcello Federico).
+ - added `--cite` to produce the citation for easy inclusion in papers
+ - added `--input` (`-i`) to set input to a file instead of STDIN
+ - removed accent mark after objection from UN official
+
+- 1.1.7 (27 November 2017)
+ - corpus_bleu() now raises an exception if input streams are different lengths
+ - thanks to Martin Popel for:
+ - small bugfix in tokenization_13a (not affecting WMT references)
+ - adding `--tok intl` (international tokenization)
+ - added wmt16/dev and wmt17/dev sets (for languages intro'd those years)
+
+- 1.1.6 (15 November 2017)
+ - bugfix for tokenization warning
+
+- 1.1.5 (12 November 2017)
+ - added -b option (only output the BLEU score)
+ - removed fi-en from list of WMT16/17 systems with more than one reference
+ - added WMT16/tworefs and WMT17/tworefs for scoring with both en-fi references
+
+- 1.1.4 (10 November 2017)
+ - added effective order for sentence-level BLEU computation
+ - added unit tests from sockeye
+
+- 1.1.3 (8 November 2017).
+ - Factored code a bit to facilitate API:
+ - compute_bleu: works from raw stats
+ - corpus_bleu for use from the command line
+ - raw_corpus_bleu: turns off tokenization, command-line sanity checks, floor smoothing
+ - Smoothing (type 'exp', now the default) fixed to produce mteval-v13a.pl results
+ - Added 'floor' smoothing (adds 0.01 to 0 counts, more versatile via API), 'none' smoothing (via API)
+ - Small bugfixes, windows compatibility (H/T Christian Federmann)
+
+- 1.0.3 (4 November 2017).
+ - Contributions from Christian Federmann:
+ - Added explicit support for encoding
+ - Fixed Windows support
+ - Bugfix in handling reference length with multiple refs
+
+- version 1.0.1 (1 November 2017).
+ - Small bugfix affecting some versions of Python.
+ - Code reformatting due to Ozan Çağlayan.
+
+- version 1.0 (23 October 2017).
+ - Support for WMT 2008--2017.
+ - Single tokenization (v13a) with lowercase fix (proper lower() instead of just A-Z). + - Chinese tokenization. + - Tested to match all WMT17 scores on all arcs. + diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..a90794a5daf679d89abd93c10d49e9035e171cc5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/RECORD @@ -0,0 +1,75 @@ +../../../bin/sacrebleu,sha256=xGl1UQEIi3XGJ-ZmBESEe8PtLzNoFRFF8nqwn5TrLII,244 +sacrebleu-2.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sacrebleu-2.4.2.dist-info/LICENSE.txt,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +sacrebleu-2.4.2.dist-info/METADATA,sha256=nqHo74uPqdWFe4CAQXRARYbVcrPNfYjt8pCnOV6yzEM,58040 +sacrebleu-2.4.2.dist-info/RECORD,, +sacrebleu-2.4.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +sacrebleu-2.4.2.dist-info/entry_points.txt,sha256=oacOmm24wUB3Xd7hB1dBABMzlZh9ReRJRNhaKu1fDlw,55 +sacrebleu-2.4.2.dist-info/top_level.txt,sha256=H3cpHXPQtQtVbKIwDN8Au2t8nwrTQlA9omGuoF7tJOA,10 +sacrebleu/__init__.py,sha256=UUPHuhC7GiZVAAQCArR_33Tgk-kBShN07vaS233ye1k,1706 +sacrebleu/__main__.py,sha256=Eim2Tft9Xcoh5PJ4n23mVnpP36UmecOtCAH1Upl5kao,1062 +sacrebleu/__pycache__/__init__.cpython-310.pyc,, +sacrebleu/__pycache__/__main__.cpython-310.pyc,, +sacrebleu/__pycache__/compat.cpython-310.pyc,, +sacrebleu/__pycache__/sacrebleu.cpython-310.pyc,, +sacrebleu/__pycache__/significance.cpython-310.pyc,, +sacrebleu/__pycache__/utils.cpython-310.pyc,, +sacrebleu/compat.py,sha256=rq8s6SgH9xgBc2uK6JXoH0HWZ6CbcpP_4_X66nQhyCs,9100 +sacrebleu/dataset/__init__.py,sha256=1KRahpk98bOFnIQaZLGDy0XgndrHjQXotCvogSvZ0RQ,106142 +sacrebleu/dataset/__main__.py,sha256=4zJ7F7mtk2LKCGorTJe0YCtZ0al5lvrJNMrfrxl3_OQ,1250 +sacrebleu/dataset/__pycache__/__init__.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/__main__.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/base.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/fake_sgml.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/iwslt_xml.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/plain_text.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/tsv.cpython-310.pyc,, +sacrebleu/dataset/__pycache__/wmt_xml.cpython-310.pyc,, +sacrebleu/dataset/base.py,sha256=TZGsir4PvdcO8YF_MWevd3Qb95ycQB2rvtAOkYIR0XM,6724 +sacrebleu/dataset/fake_sgml.py,sha256=SW00xrlhdc9Sr9Z8Q7RF8mXVuLo3s1aNLC-zu3Crevo,4098 +sacrebleu/dataset/iwslt_xml.py,sha256=nAwIXBfbcQLbbI3Eoe2DqdjGfD1V-429_qrwuZZqmj0,210 +sacrebleu/dataset/plain_text.py,sha256=AKYCHFRtVLCoFod5pKBrqvKRxYmkqoPwUpcPup1Jg_8,1237 +sacrebleu/dataset/tsv.py,sha256=m__O5lc8GmPvKl_901bndLAOuh9a1fK1koHOKWX6h90,2179 +sacrebleu/dataset/wmt_xml.py,sha256=6xyzgctfaFWSxsUjKseWPy6YwHZekB-Ci-aUQrZFHP0,7828 +sacrebleu/metrics/__init__.py,sha256=MzC5hSbprlwwvbeu6_6_FOz3L7c1KvtBkmkgOaGVTDk,260 +sacrebleu/metrics/__pycache__/__init__.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/base.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/bleu.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/chrf.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/helpers.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/lib_ter.cpython-310.pyc,, +sacrebleu/metrics/__pycache__/ter.cpython-310.pyc,, +sacrebleu/metrics/base.py,sha256=xTWUzNfXJIjnbiIYBlwhSoRgfiFYLqe84rIhYH1nSXM,16559 +sacrebleu/metrics/bleu.py,sha256=TqGdxEDu3H0P1uGKi-Y_BMNYK0xtpqtEGe7T0dJpgbs,17485 
+sacrebleu/metrics/chrf.py,sha256=lhkOB3nwuSOvsub17Mct9gQ0oR038gEj5vj-1IFQhXU,10674 +sacrebleu/metrics/helpers.py,sha256=VWngO3F_9gUa4uQFH1WJm4xZqpq3CLYeAWEOeMuECx4,2339 +sacrebleu/metrics/lib_ter.py,sha256=OBJjJHmDBzq_supZW68Vma-tkSVy1tibSLw_R093ncE,16477 +sacrebleu/metrics/ter.py,sha256=TuS52VLjayllPf-EB6-8Sf7EdkIGdJ0L1QfXzCSOcBo,7769 +sacrebleu/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sacrebleu/sacrebleu.py,sha256=ZwXF3KtqBb-NGDVYclj5gkO7IlfV8-T7gM7kBDPX1VM,28458 +sacrebleu/significance.py,sha256=PDO7mCu5zL-7XNc9tA7QKs0C3TkcfrJgbKqYXwnXTp4,18329 +sacrebleu/tokenizers/__init__.py,sha256=V1unPdEJPrfDKNDYR1VSLu1bXCX0Fr8Uu8y9pNcZXGA,89 +sacrebleu/tokenizers/__pycache__/__init__.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_13a.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_base.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_char.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_intl.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_ja_mecab.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_ko_mecab.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_none.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_re.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_spm.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_ter.cpython-310.pyc,, +sacrebleu/tokenizers/__pycache__/tokenizer_zh.cpython-310.pyc,, +sacrebleu/tokenizers/tokenizer_13a.py,sha256=_1ClpQPIGqRj6uaklsFegAvSZUtlbE-yztmr51dLirU,985 +sacrebleu/tokenizers/tokenizer_base.py,sha256=YNvqL3oW3rsCtUbMNat6RhlYSn2VlZuPHgYHBj6jfJg,461 +sacrebleu/tokenizers/tokenizer_char.py,sha256=ubPsBjzNXqFPJ7WEpHQ2XD2ZWWqPgeOli9ErHOxmjTw,458 +sacrebleu/tokenizers/tokenizer_intl.py,sha256=EGayKRqqcY6oV-Zstah4mrYw979W5jK0AWK8Uo3C55Q,1869 +sacrebleu/tokenizers/tokenizer_ja_mecab.py,sha256=WYHc7xtAruICGfe-D6RYmnyrXxrlHlyNXVqQVs-6l1o,1420 +sacrebleu/tokenizers/tokenizer_ko_mecab.py,sha256=BWTEkHp8d2dJOAmlqdBtLt2MHJtpOdM78CWztQ-qS00,1455 +sacrebleu/tokenizers/tokenizer_none.py,sha256=0VX-qoUM3HzuLpF59L-gHjNgmoXXXpco23zYJtwmEmw,236 +sacrebleu/tokenizers/tokenizer_re.py,sha256=OGck3pPhwibf48Dssy-6cZObsHNetwb3IIJnUYGRiAg,1271 +sacrebleu/tokenizers/tokenizer_spm.py,sha256=otBUswldtMcpE95vvIDf-74eddy35_9p88mgv9J5T9g,2096 +sacrebleu/tokenizers/tokenizer_ter.py,sha256=37KPsx0jwsSZsqZ7UJKWAYyttvqS11SxzkHZIjrZBdA,6219 +sacrebleu/tokenizers/tokenizer_zh.py,sha256=GmBe3E29uECf61dFsR_qAHNzATEkgwwp0XAqb8de9wU,4724 +sacrebleu/utils.py,sha256=lwyJiKniOQYhX5Yb2Dfe2kTROa0Fs-q-weO2kIVAz00,22530 diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1f37c02f2eb2e26b306202feaccb31e522b8b169 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/entry_points.txt b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..07aad66d59eabe4a30481d4fe36ace4e1c17e234 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +sacrebleu = sacrebleu.sacrebleu:main diff --git 
a/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/top_level.txt b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..454a21c03558b3210da0efe42581688e9cfc4b1d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/sacrebleu-2.4.2.dist-info/top_level.txt @@ -0,0 +1 @@ +sacrebleu diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..9da6d09f0e1e81690a0a9f946721401ad2bbebc1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/METADATA @@ -0,0 +1,154 @@ +Metadata-Version: 2.1 +Name: urllib3 +Version: 2.2.1 +Summary: HTTP library with thread-safe connection pooling, file post, and more. +Project-URL: Changelog, https://github.com/urllib3/urllib3/blob/main/CHANGES.rst +Project-URL: Documentation, https://urllib3.readthedocs.io +Project-URL: Code, https://github.com/urllib3/urllib3 +Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues +Author-email: Andrey Petrov +Maintainer-email: Seth Michael Larson , Quentin Pradet , Illia Volochii +License-File: LICENSE.txt +Keywords: filepost,http,httplib,https,pooling,ssl,threadsafe,urllib +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.8 +Provides-Extra: brotli +Requires-Dist: brotli>=1.0.9; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi>=0.8.0; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: h2 +Requires-Dist: h2<5,>=4; extra == 'h2' +Provides-Extra: socks +Requires-Dist: pysocks!=1.5.7,<2.0,>=1.5.6; extra == 'socks' +Provides-Extra: zstd +Requires-Dist: zstandard>=0.18.0; extra == 'zstd' +Description-Content-Type: text/markdown + +

+ +![urllib3](https://github.com/urllib3/urllib3/raw/main/docs/_static/banner_github.svg) + +

+ +

+ [badge row: PyPI Version, Python Versions, Join our Discord, Coverage Status, Build Status on GitHub, Documentation Status, OpenSSF Scorecard, SLSA 3, CII Best Practices]
+

+ +urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. +- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip, deflate, brotli, and zstd encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use: + +```python3 +>>> import urllib3 +>>> resp = urllib3.request("GET", "http://httpbin.org/robots.txt") +>>> resp.status +200 +>>> resp.data +b"User-agent: *\nDisallow: /deny\n" +``` + +## Installing + +urllib3 can be installed with [pip](https://pip.pypa.io): + +```bash +$ python -m pip install urllib3 +``` + +Alternatively, you can grab the latest source code from [GitHub](https://github.com/urllib3/urllib3): + +```bash +$ git clone https://github.com/urllib3/urllib3.git +$ cd urllib3 +$ pip install . +``` + + +## Documentation + +urllib3 has usage and reference documentation at [urllib3.readthedocs.io](https://urllib3.readthedocs.io). + + +## Community + +urllib3 has a [community Discord channel](https://discord.gg/urllib3) for asking questions and +collaborating with other contributors. Drop by and say hello 👋 + + +## Contributing + +urllib3 happily accepts contributions. Please see our +[contributing documentation](https://urllib3.readthedocs.io/en/latest/contributing.html) +for some tips on getting started. + + +## Security Disclosures + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure with maintainers. + + +## Maintainers + +- [@sethmlarson](https://github.com/sethmlarson) (Seth M. Larson) +- [@pquentin](https://github.com/pquentin) (Quentin Pradet) +- [@illia-v](https://github.com/illia-v) (Illia Volochii) +- [@theacodes](https://github.com/theacodes) (Thea Flowers) +- [@haikuginger](https://github.com/haikuginger) (Jess Shapiro) +- [@lukasa](https://github.com/lukasa) (Cory Benfield) +- [@sigmavirus24](https://github.com/sigmavirus24) (Ian Stapleton Cordasco) +- [@shazow](https://github.com/shazow) (Andrey Petrov) + +👋 + + +## Sponsorship + +If your company benefits from this library, please consider [sponsoring its +development](https://urllib3.readthedocs.io/en/latest/sponsors.html). + + +## For Enterprise + +Professional support for urllib3 is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. 
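+
+A minimal sketch supplementing the quick start above, assuming only the public `PoolManager`, `Retry`, and `Timeout` names that the package exports (see `__init__.py` below); the URL is just an example endpoint:
+
+```python
+import urllib3
+from urllib3.util import Retry, Timeout
+
+# A single PoolManager reuses connections across requests and is thread-safe.
+http = urllib3.PoolManager(
+    retries=Retry(total=3, backoff_factor=0.5),  # retry transient failures
+    timeout=Timeout(connect=2.0, read=5.0),      # bound connect/read waits
+)
+
+resp = http.request("GET", "https://httpbin.org/get", fields={"q": "urllib3"})
+print(resp.status)
+print(resp.headers.get("Content-Type"))
+```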
+ +[1]: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2f90c8a4d9b3dfa426f453fdf3f47ade1614e09e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/RECORD @@ -0,0 +1,75 @@ +urllib3-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +urllib3-2.2.1.dist-info/METADATA,sha256=uROmjQwfAbwRYjV9PMdc5JF5NA3kRkpoKafPkNzybfc,6434 +urllib3-2.2.1.dist-info/RECORD,, +urllib3-2.2.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87 +urllib3-2.2.1.dist-info/licenses/LICENSE.txt,sha256=Ew46ZNX91dCWp1JpRjSn2d8oRGnehuVzIQAmgEHj1oY,1093 +urllib3/__init__.py,sha256=JMo1tg1nIV1AeJ2vENC_Txfl0e5h6Gzl9DGVk1rWRbo,6979 +urllib3/__pycache__/__init__.cpython-310.pyc,, +urllib3/__pycache__/_base_connection.cpython-310.pyc,, +urllib3/__pycache__/_collections.cpython-310.pyc,, +urllib3/__pycache__/_request_methods.cpython-310.pyc,, +urllib3/__pycache__/_version.cpython-310.pyc,, +urllib3/__pycache__/connection.cpython-310.pyc,, +urllib3/__pycache__/connectionpool.cpython-310.pyc,, +urllib3/__pycache__/exceptions.cpython-310.pyc,, +urllib3/__pycache__/fields.cpython-310.pyc,, +urllib3/__pycache__/filepost.cpython-310.pyc,, +urllib3/__pycache__/http2.cpython-310.pyc,, +urllib3/__pycache__/poolmanager.cpython-310.pyc,, +urllib3/__pycache__/response.cpython-310.pyc,, +urllib3/_base_connection.py,sha256=p-DOG_Me7-sJXO1R9VgDpNmdVU_kIS8VtaC7ptEllA0,5640 +urllib3/_collections.py,sha256=vzKA-7X-9resOamEWq52uV1nHshChjbYDvz47H0mMjw,17400 +urllib3/_request_methods.py,sha256=ucEpHQyQf06b9o1RxKLkCpzGH0ct-v7X2xGpU6rmmlo,9984 +urllib3/_version.py,sha256=12idLAcGmrAURPX52rGioBo33oQ__-ENJEdeqHvUUZg,98 +urllib3/connection.py,sha256=zFgaaoqrICsl7-kBp-_4va9m82sYhioAuy4-4iDpK0I,34704 +urllib3/connectionpool.py,sha256=XjTfYowLwN5ZzRMO41_OTbGNX4ANifgYVpWsVMRuC00,43556 +urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/__pycache__/__init__.cpython-310.pyc,, +urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc,, +urllib3/contrib/__pycache__/socks.cpython-310.pyc,, +urllib3/contrib/emscripten/__init__.py,sha256=u6KNgzjlFZbuAAXa_ybCR7gQ71VJESnF-IIdDA73brw,733 +urllib3/contrib/emscripten/__pycache__/__init__.cpython-310.pyc,, +urllib3/contrib/emscripten/__pycache__/connection.cpython-310.pyc,, +urllib3/contrib/emscripten/__pycache__/fetch.cpython-310.pyc,, +urllib3/contrib/emscripten/__pycache__/request.cpython-310.pyc,, +urllib3/contrib/emscripten/__pycache__/response.cpython-310.pyc,, +urllib3/contrib/emscripten/connection.py,sha256=kaBe2tWt7Yy9vNUFRBV7CSyDnfhCYILGxju9KTZj8Sw,8755 +urllib3/contrib/emscripten/emscripten_fetch_worker.js,sha256=CDfYF_9CDobtx2lGidyJ1zjDEvwNT5F-dchmVWXDh0E,3655 +urllib3/contrib/emscripten/fetch.py,sha256=ymwJlHBBuw6WTpKgPHpdmmrNBxlsr75HqoD4Rn27YXk,14131 +urllib3/contrib/emscripten/request.py,sha256=mL28szy1KvE3NJhWor5jNmarp8gwplDU-7gwGZY5g0Q,566 +urllib3/contrib/emscripten/response.py,sha256=wIDmdJ4doFWqLl5s86l9n0V70gFjQ2HWaPgz69jM52E,9546 +urllib3/contrib/pyopenssl.py,sha256=X31eCYGwB09EkAHX8RhDKC0X0Ki7d0cCVWoMJZUM5bQ,19161 +urllib3/contrib/socks.py,sha256=gFS2-zOw4_vLGpUvExOf3fNVT8liz6vhM2t6lBPn3CY,7572 +urllib3/exceptions.py,sha256=RDaiudtR7rqbVKTKpLSgZBBtwaIqV7eZtervZV_mZag,9393 
+urllib3/fields.py,sha256=8vi0PeRo_pE5chPmJA07LZtMkVls4UrBS1k2xM506jM,10843 +urllib3/filepost.py,sha256=-9qJT11cNGjO9dqnI20-oErZuTvNaM18xZZPCjZSbOE,2395 +urllib3/http2.py,sha256=4QQcjTM9UYOQZe0r8KnA8anU9ST4p_s3SB3gRTueyPc,7480 +urllib3/poolmanager.py,sha256=fcC3OwjFKxha06NsOORwbZOzrVt1pyY-bNCbKiqC0l8,22935 +urllib3/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93 +urllib3/response.py,sha256=lmvseToQbkLXuFyA3jcSyCPjTgSfa6YPA4xUhVqq8QI,43874 +urllib3/util/__init__.py,sha256=-qeS0QceivazvBEKDNFCAI-6ACcdDOE4TMvo7SLNlAQ,1001 +urllib3/util/__pycache__/__init__.cpython-310.pyc,, +urllib3/util/__pycache__/connection.cpython-310.pyc,, +urllib3/util/__pycache__/proxy.cpython-310.pyc,, +urllib3/util/__pycache__/request.cpython-310.pyc,, +urllib3/util/__pycache__/response.cpython-310.pyc,, +urllib3/util/__pycache__/retry.cpython-310.pyc,, +urllib3/util/__pycache__/ssl_.cpython-310.pyc,, +urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc,, +urllib3/util/__pycache__/ssltransport.cpython-310.pyc,, +urllib3/util/__pycache__/timeout.cpython-310.pyc,, +urllib3/util/__pycache__/url.cpython-310.pyc,, +urllib3/util/__pycache__/util.cpython-310.pyc,, +urllib3/util/__pycache__/wait.cpython-310.pyc,, +urllib3/util/connection.py,sha256=QeUUEuNmhznpuKNPL-B0IVOkMdMCu8oJX62OC0Vpzug,4462 +urllib3/util/proxy.py,sha256=seP8-Q5B6bB0dMtwPj-YcZZQ30vHuLqRu-tI0JZ2fzs,1148 +urllib3/util/request.py,sha256=PQnBmKUHMQ0hQQ41uhbLNAeA24ke60m6zeiwfwocpGo,8102 +urllib3/util/response.py,sha256=vQE639uoEhj1vpjEdxu5lNIhJCSUZkd7pqllUI0BZOA,3374 +urllib3/util/retry.py,sha256=WB-7x1m7fQH_-Qqtrk2OGvz93GvBTxc-pRn8Vf3p4mg,18384 +urllib3/util/ssl_.py,sha256=FeymdS68RggEROwMB9VLGSqLHq2hRUKnIbQC_bCpGJI,19109 +urllib3/util/ssl_match_hostname.py,sha256=gaWqixoYtQ_GKO8fcRGFj3VXeMoqyxQQuUTPgWeiL_M,5812 +urllib3/util/ssltransport.py,sha256=SF__JQXVcHBQniFJZp3P9q-UeHM310WVwcBwqT9dCLE,9034 +urllib3/util/timeout.py,sha256=4eT1FVeZZU7h7mYD1Jq2OXNe4fxekdNvhoWUkZusRpA,10346 +urllib3/util/url.py,sha256=wHORhp80RAXyTlAIkTqLFzSrkU7J34ZDxX-tN65MBZk,15213 +urllib3/util/util.py,sha256=j3lbZK1jPyiwD34T8IgJzdWEZVT-4E-0vYIJi9UjeNA,1146 +urllib3/util/wait.py,sha256=_ph8IrUR3sqPqi0OopQgJUlH4wzkGeM5CiyA7XGGtmI,4423 diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..5998f3aab327ceb8cb346647a3461e220359aebf --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/licenses/LICENSE.txt b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6183d0276b26c5b87aecccf8d0d5bcd7b1148d4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3-2.2.1.dist-info/licenses/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__init__.py b/env-llmeval/lib/python3.10/site-packages/urllib3/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3fe782c8a45bbabcf240f3cac4303ac12b0ec274 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/__init__.py @@ -0,0 +1,211 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" + +from __future__ import annotations + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import sys +import typing +import warnings +from logging import NullHandler + +from . import exceptions +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import BaseHTTPResponse, HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout + +# Ensure that Python is compiled with OpenSSL 1.1.1+ +# If the 'ssl' module isn't available at all that's +# fine, we only care if the module is available. +try: + import ssl +except ImportError: + pass +else: + if not ssl.OPENSSL_VERSION.startswith("OpenSSL "): # Defensive: + warnings.warn( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. " + "See: https://github.com/urllib3/urllib3/issues/3020", + exceptions.NotOpenSSLWarning, + ) + elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1): # Defensive: + raise ImportError( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. 
" + "See: https://github.com/urllib3/urllib3/issues/2168" + ) + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPHeaderDict", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "make_headers", + "proxy_from_url", + "request", + "BaseHTTPResponse", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger( + level: int = logging.DEBUG, +) -> logging.StreamHandler[typing.TextIO]: + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. +del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) + + +def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None: + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter("ignore", category) + + +_DEFAULT_POOL = PoolManager() + + +def request( + method: str, + url: str, + *, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + preload_content: bool | None = True, + decode_content: bool | None = True, + redirect: bool | None = True, + retries: Retry | bool | int | None = None, + timeout: Timeout | float | int | None = 3, + json: typing.Any | None = None, +) -> BaseHTTPResponse: + """ + A convenience, top-level request method. It uses a module-global ``PoolManager`` instance. + Therefore, its side effects could be shared across dependencies relying on it. + To avoid side effects create a new ``PoolManager`` instance and use it instead. + The method does not accept low-level ``**urlopen_kw`` keyword arguments. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. + + :param bool preload_content: + If True, the response's body will be preloaded into memory. + + :param bool decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. 
Disabling retries + will disable redirect, too. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param json: + Data to encode and send as JSON with UTF-encoded in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise. + """ + + return _DEFAULT_POOL.request( + method, + url, + body=body, + fields=fields, + headers=headers, + preload_content=preload_content, + decode_content=decode_content, + redirect=redirect, + retries=retries, + timeout=timeout, + json=json, + ) + + +if sys.platform == "emscripten": + from .contrib.emscripten import inject_into_urllib3 # noqa: 401 + + inject_into_urllib3() diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc533d3273c4bdfb1865009f3c790dd07c386b93 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_base_connection.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_base_connection.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7587b5c92b9ea304356890f0e3371225243419c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_base_connection.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_collections.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_collections.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d498cbcd6d696a3dd9efb1b019a69453c2cb4d71 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_collections.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_request_methods.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_request_methods.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9db50aa751394edebbde087fedbb3ec627e10ac4 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_request_methods.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_version.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fe653da676f4dc87598efec95743069aa3935bd Binary files /dev/null and 
b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/_version.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connection.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connection.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c04aa110f6284fcbb8dff85983a835999849c36f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connection.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connectionpool.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connectionpool.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1dffec164844d8a13702279fc57f85a8d3ba87f9 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/connectionpool.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/exceptions.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b02eb9bcd90599922f9c919a5712938ecb6bbc7b Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/exceptions.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/fields.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/fields.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1227757df3c01c8540be5ebb4049c5e11b8fd7e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/fields.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/filepost.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/filepost.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..857e557510cd8533a4ca7d4c00d5e671f11e7d13 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/filepost.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/http2.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/http2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac15b8c43104032ab48c02dfa26da8651403771d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/http2.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/poolmanager.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/poolmanager.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de43bb21414c722b53f59f3224170f315c9995df Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/poolmanager.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/response.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/response.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..032799a6389161ef363b0c00b94cb35649b1f116 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/__pycache__/response.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/_base_connection.py 
b/env-llmeval/lib/python3.10/site-packages/urllib3/_base_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..bb349c744bd1a426550b8402f928707c7d1ef1c1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/_base_connection.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import typing + +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT +from .util.url import Url + +_TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str] + + +class ProxyConfig(typing.NamedTuple): + ssl_context: ssl.SSLContext | None + use_forwarding_for_https: bool + assert_hostname: None | str | Literal[False] + assert_fingerprint: str | None + + +class _ResponseOptions(typing.NamedTuple): + # TODO: Remove this in favor of a better + # HTTP request/response lifecycle tracking. + request_method: str + request_url: str + preload_content: bool + decode_content: bool + enforce_content_length: bool + + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal, Protocol + + from .response import BaseHTTPResponse + + class BaseHTTPConnection(Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + host: str + port: int + timeout: None | ( + float + ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved. + blocksize: int + source_address: tuple[str, int] | None + socket_options: _TYPE_SOCKET_OPTIONS | None + + proxy: Url | None + proxy_config: ProxyConfig | None + + is_verified: bool + proxy_is_verified: bool | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + ... + + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + ... + + def connect(self) -> None: + ... + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + # We know *at least* botocore is depending on the order of the + # first 3 parameters so to be safe we only mark the later ones + # as keyword-only to ensure we have space to extend. + *, + chunked: bool = False, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> None: + ... + + def getresponse(self) -> BaseHTTPResponse: + ... + + def close(self) -> None: + ... + + @property + def is_closed(self) -> bool: + """Whether the connection either is brand new or has been previously closed. + If this property is True then both ``is_connected`` and ``has_connected_to_proxy`` + properties must be False. + """ + + @property + def is_connected(self) -> bool: + """Whether the connection is actively connected to any origin (proxy or target)""" + + @property + def has_connected_to_proxy(self) -> bool: + """Whether the connection has successfully connected to its proxy. + This returns False if no proxy is in use. Used to determine whether + errors are coming from the proxy layer or from tunnelling to the target origin. 
+ """ + + class BaseHTTPSConnection(BaseHTTPConnection, Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + # Certificate verification methods + cert_reqs: int | str | None + assert_hostname: None | str | Literal[False] + assert_fingerprint: str | None + ssl_context: ssl.SSLContext | None + + # Trusted CAs + ca_certs: str | None + ca_cert_dir: str | None + ca_cert_data: None | str | bytes + + # TLS version + ssl_minimum_version: int | None + ssl_maximum_version: int | None + ssl_version: int | str | None # Deprecated + + # Client certificates + cert_file: str | None + key_file: str | None + key_password: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + ... diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/_collections.py b/env-llmeval/lib/python3.10/site-packages/urllib3/_collections.py new file mode 100644 index 0000000000000000000000000000000000000000..55b03247972b3397463cbee9f81ec3d1e131085b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/_collections.py @@ -0,0 +1,483 @@ +from __future__ import annotations + +import typing +from collections import OrderedDict +from enum import Enum, auto +from threading import RLock + +if typing.TYPE_CHECKING: + # We can only import Protocol if TYPE_CHECKING because it's a development + # dependency, and is not available at runtime. + from typing import Protocol + + from typing_extensions import Self + + class HasGettableStringKeys(Protocol): + def keys(self) -> typing.Iterator[str]: + ... + + def __getitem__(self, key: str) -> str: + ... + + +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] + + +# Key type +_KT = typing.TypeVar("_KT") +# Value type +_VT = typing.TypeVar("_VT") +# Default type +_DT = typing.TypeVar("_DT") + +ValidHTTPHeaderSource = typing.Union[ + "HTTPHeaderDict", + typing.Mapping[str, str], + typing.Iterable[typing.Tuple[str, str]], + "HasGettableStringKeys", +] + + +class _Sentinel(Enum): + not_passed = auto() + + +def ensure_can_construct_http_header_dict( + potential: object, +) -> ValidHTTPHeaderSource | None: + if isinstance(potential, HTTPHeaderDict): + return potential + elif isinstance(potential, typing.Mapping): + # Full runtime checking of the contents of a Mapping is expensive, so for the + # purposes of typechecking, we assume that any Mapping is the right shape. + return typing.cast(typing.Mapping[str, str], potential) + elif isinstance(potential, typing.Iterable): + # Similarly to Mapping, full runtime checking of the contents of an Iterable is + # expensive, so for the purposes of typechecking, we assume that any Iterable + # is the right shape. 
+ return typing.cast(typing.Iterable[typing.Tuple[str, str]], potential) + elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"): + return typing.cast("HasGettableStringKeys", potential) + else: + return None + + +class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + _container: typing.OrderedDict[_KT, _VT] + _maxsize: int + dispose_func: typing.Callable[[_VT], None] | None + lock: RLock + + def __init__( + self, + maxsize: int = 10, + dispose_func: typing.Callable[[_VT], None] | None = None, + ) -> None: + super().__init__() + self._maxsize = maxsize + self.dispose_func = dispose_func + self._container = OrderedDict() + self.lock = RLock() + + def __getitem__(self, key: _KT) -> _VT: + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key: _KT, value: _VT) -> None: + evicted_item = None + with self.lock: + # Possibly evict the existing value of 'key' + try: + # If the key exists, we'll overwrite it, which won't change the + # size of the pool. Because accessing a key should move it to + # the end of the eviction line, we pop it out first. + evicted_item = key, self._container.pop(key) + self._container[key] = value + except KeyError: + # When the key does not exist, we insert the value first so that + # evicting works in all cases, including when self._maxsize is 0 + self._container[key] = value + if len(self._container) > self._maxsize: + # If we didn't evict an existing value, and we've hit our maximum + # size, then we have to evict the least recently used item from + # the beginning of the container. + evicted_item = self._container.popitem(last=False) + + # After releasing the lock on the pool, dispose of any evicted value. + if evicted_item is not None and self.dispose_func: + _, evicted_value = evicted_item + self.dispose_func(evicted_value) + + def __delitem__(self, key: _KT) -> None: + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self) -> int: + with self.lock: + return len(self._container) + + def __iter__(self) -> typing.NoReturn: + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) + + def clear(self) -> None: + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(self._container.values()) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self) -> set[_KT]: # type: ignore[override] + with self.lock: + return set(self._container.keys()) + + +class HTTPHeaderDictItemView(typing.Set[typing.Tuple[str, str]]): + """ + HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of + address. 
+ + If we directly try to get an item with a particular name, we will get a string + back that is the concatenated version of all the values: + + >>> d['X-Header-Name'] + 'Value1, Value2, Value3' + + However, if we iterate over an HTTPHeaderDict's items, we will optionally combine + these values based on whether combine=True was called when building up the dictionary: + + >>> d = HTTPHeaderDict({"A": "1", "B": "foo"}) + >>> d.add("A", "2", combine=True) + >>> d.add("B", "bar") + >>> list(d.items()) + [ + ('A', '1, 2'), + ('B', 'foo'), + ('B', 'bar'), + ] + + This class conforms to the interface required by the MutableMapping ABC while + also giving us the nonstandard iteration behavior we want: items with duplicate + keys, ordered by time of first insertion. + """ + + _headers: HTTPHeaderDict + + def __init__(self, headers: HTTPHeaderDict) -> None: + self._headers = headers + + def __len__(self) -> int: + return len(list(self._headers.iteritems())) + + def __iter__(self) -> typing.Iterator[tuple[str, str]]: + return self._headers.iteritems() + + def __contains__(self, item: object) -> bool: + if isinstance(item, tuple) and len(item) == 2: + passed_key, passed_val = item + if isinstance(passed_key, str) and isinstance(passed_val, str): + return self._headers._has_value_for_header(passed_key, passed_val) + return False + + + class HTTPHeaderDict(typing.MutableMapping[str, str]): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict``-like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s API. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost.
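+ + For example, a sketch of the recommended ``.add`` loop for repeated fields (the header values here are purely illustrative): + + >>> pairs = [('Accept', 'text/html'), ('Accept', 'application/json')] + >>> safe = HTTPHeaderDict() + >>> for name, value in pairs: + ... safe.add(name, value) + >>> safe['accept'] + 'text/html, application/json'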
+ + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + _container: typing.MutableMapping[str, list[str]] + + def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str): + super().__init__() + self._container = {} # 'dict' is insertion-ordered + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key: str, val: str) -> None: + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + self._container[key.lower()] = [key, val] + + def __getitem__(self, key: str) -> str: + val = self._container[key.lower()] + return ", ".join(val[1:]) + + def __delitem__(self, key: str) -> None: + del self._container[key.lower()] + + def __contains__(self, key: object) -> bool: + if isinstance(key, str): + return key.lower() in self._container + return False + + def setdefault(self, key: str, default: str = "") -> str: + return super().setdefault(key, default) + + def __eq__(self, other: object) -> bool: + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return False + else: + other_as_http_header_dict = type(self)(maybe_constructable) + + return {k.lower(): v for k, v in self.itermerged()} == { + k.lower(): v for k, v in other_as_http_header_dict.itermerged() + } + + def __ne__(self, other: object) -> bool: + return not self.__eq__(other) + + def __len__(self) -> int: + return len(self._container) + + def __iter__(self) -> typing.Iterator[str]: + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def discard(self, key: str) -> None: + try: + del self[key] + except KeyError: + pass + + def add(self, key: str, val: str, *, combine: bool = False) -> None: + """Adds a (name, value) pair; doesn't overwrite the value if it already + exists. + + If this is called with combine=True, instead of adding a new header value + as a distinct item during iteration, this will append the value to + any existing header value with a comma. If no existing header value exists + for the key, then the value will simply be added, ignoring the combine parameter. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + >>> list(headers.items()) + [('foo', 'bar'), ('foo', 'baz')] + >>> headers.add('foo', 'quz', combine=True) + >>> list(headers.items()) + [('foo', 'bar, baz, quz')] + """ + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case, i.e. no item present, as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + # if there are values here, then there is at least the initial + # key/value pair + assert len(vals) >= 2 + if combine: + vals[-1] = vals[-1] + ", " + val + else: + vals.append(val) + + def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None: + """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__. + """ + if len(args) > 1: + raise TypeError( + f"extend() takes at most 1 positional argument ({len(args)} given)" + ) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, typing.Mapping): + for key, val in other.items(): + self.add(key, val) + elif isinstance(other, typing.Iterable): + other = typing.cast(typing.Iterable[typing.Tuple[str, str]], other) + for key, value in other: + self.add(key, value) + elif hasattr(other, "keys") and hasattr(other, "__getitem__"): + # THIS IS NOT A TYPESAFE BRANCH + # In this branch, the object has a `keys` attr but is not a Mapping or any of + # the other types indicated in the method signature. We do some stuff with + # it as though it partially implements the Mapping interface, but we're not + # doing that stuff safely AT ALL. + for key in other.keys(): + self.add(key, other[key]) + + for key, value in kwargs.items(): + self.add(key, value) + + @typing.overload + def getlist(self, key: str) -> list[str]: + ... + + @typing.overload + def getlist(self, key: str, default: _DT) -> list[str] | _DT: + ... + + def getlist( + self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed + ) -> list[str] | _DT: + """Returns a list of all the values for the named field. Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is _Sentinel.not_passed: + # _DT is unbound; empty list is instance of List[str] + return [] + # _DT is bound; default is instance of _DT + return default + else: + # _DT may or may not be bound; vals[1:] is instance of List[str], which + # meets our external interface requirement of `Union[List[str], _DT]`. + return vals[1:] + + def _prepare_for_method_change(self) -> Self: + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4.
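+ + A minimal illustration (the header values are hypothetical): + + >>> headers = HTTPHeaderDict({"Content-Type": "application/json", "Accept": "*/*"}) + >>> headers = headers._prepare_for_method_change() + >>> "Content-Type" in headers + False + >>> "Accept" in headers + True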
+ """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self) -> str: + return f"{type(self).__name__}({dict(self.itermerged())})" + + def _copy_from(self, other: HTTPHeaderDict) -> None: + for key in other: + val = other.getlist(key) + self._container[key.lower()] = [key, *val] + + def copy(self) -> HTTPHeaderDict: + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ", ".join(val[1:]) + + def items(self) -> HTTPHeaderDictItemView: # type: ignore[override] + return HTTPHeaderDictItemView(self) + + def _has_value_for_header(self, header_name: str, potential_value: str) -> bool: + if header_name in self: + return potential_value in self._container[header_name.lower()][1:] + return False + + def __ior__(self, other: object) -> HTTPHeaderDict: + # Supports extending a header dict in-place using operator |= + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + self.extend(maybe_constructable) + return self + + def __or__(self, other: object) -> HTTPHeaderDict: + # Supports merging header dicts using operator | + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = self.copy() + result.extend(maybe_constructable) + return result + + def __ror__(self, other: object) -> HTTPHeaderDict: + # Supports merging header dicts using operator | when other is on left side + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = type(self)(maybe_constructable) + result.extend(self) + return result diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/_request_methods.py b/env-llmeval/lib/python3.10/site-packages/urllib3/_request_methods.py new file mode 100644 index 0000000000000000000000000000000000000000..632042f03759466db4f87bb612ae0bab825e400e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/_request_methods.py @@ -0,0 +1,279 @@ +from __future__ import annotations + +import json as _json +import typing +from urllib.parse import urlencode + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .response import BaseHTTPResponse + +__all__ = ["RequestMethods"] + +_TYPE_ENCODE_URL_FIELDS = typing.Union[ + typing.Sequence[typing.Tuple[str, typing.Union[str, bytes]]], + typing.Mapping[str, typing.Union[str, bytes]], +] + + +class RequestMethods: + """ + Convenience mixin for 
classes that implement a :meth:`urlopen` method, such + as :class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. + + Provides behavior for making common types of HTTP requests and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request; it looks up the + appropriate encoding format and uses one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} + + def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None: + self.headers = headers or {} + + def urlopen( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **kw: typing.Any, + ) -> BaseHTTPResponse: # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." + ) + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + json: typing.Any | None = None, + **urlopen_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the request body. Values are processed + by :func:`urllib.parse.urlencode`. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param json: + Data to encode and send as UTF-8-encoded JSON in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise.
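+ + A brief illustrative sketch of typical use through a pool manager (the URL and payload below are hypothetical):: + + import urllib3 + + http = urllib3.PoolManager() + resp = http.request("POST", "https://example.com/api", json={"key": "value"}) + print(resp.status)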
+ """ + method = method.upper() + + if json is not None and body is not None: + raise TypeError( + "request got values for both 'body' and 'json' parameters which are mutually exclusive" + ) + + if json is not None: + if headers is None: + headers = self.headers + + if not ("content-type" in map(str.lower, headers.keys())): + headers = HTTPHeaderDict(headers) + headers["Content-Type"] = "application/json" + + body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode( + "utf-8" + ) + + if body is not None: + urlopen_kw["body"] = body + + if method in self._encode_url_methods: + return self.request_encode_url( + method, + url, + fields=fields, # type: ignore[arg-type] + headers=headers, + **urlopen_kw, + ) + else: + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + + def request_encode_url( + self, + method: str, + url: str, + fields: _TYPE_ENCODE_URL_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the URL. This is useful for request methods like GET, HEAD, DELETE, etc. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the URL query string. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": headers} + extra_kw.update(urlopen_kw) + + if fields: + url += "?" + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body( + self, + method: str, + url: str, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :func:`urllib3.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :func:`urllib.parse.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it at other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic the behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request.
The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param encode_multipart: + If True, encode the ``fields`` using the multipart/form-data MIME + format. + + :param multipart_boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)} + body: bytes | str + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), # type: ignore[arg-type] + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"].setdefault("Content-Type", content_type) + + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/_version.py b/env-llmeval/lib/python3.10/site-packages/urllib3/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..095cf3c16b9debf6bd3f77e75e26b0ab9c2a7612 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/_version.py @@ -0,0 +1,4 @@ +# This file is protected via CODEOWNERS +from __future__ import annotations + +__version__ = "2.2.1" diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/connection.py b/env-llmeval/lib/python3.10/site-packages/urllib3/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..aa5c547c6673b03203f667075fc8aae8732b5376 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/connection.py @@ -0,0 +1,930 @@ +from __future__ import annotations + +import datetime +import logging +import os +import re +import socket +import sys +import typing +import warnings +from http.client import HTTPConnection as _HTTPConnection +from http.client import HTTPException as HTTPException # noqa: F401 +from http.client import ResponseNotReady +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from typing import Literal + + from .response import HTTPResponse + from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT + from .util.ssltransport import SSLTransport + +from ._collections import HTTPHeaderDict +from .util.response import assert_header_parsing +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout +from .util.util import to_str +from .util.wait import wait_for_read + +try: # Compiled with SSL? 
+ import ssl + + BaseSSLError = ssl.SSLError + except (ImportError, AttributeError): + ssl = None # type: ignore[assignment] + + class BaseSSLError(BaseException): # type: ignore[no-redef] + pass + + + from ._base_connection import _TYPE_BODY + from ._base_connection import ProxyConfig as ProxyConfig + from ._base_connection import _ResponseOptions as _ResponseOptions + from ._version import __version__ + from .exceptions import ( + ConnectTimeoutError, + HeaderParsingError, + NameResolutionError, + NewConnectionError, + ProxyError, + SystemTimeWarning, + ) + from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_ + from .util.request import body_to_chunks + from .util.ssl_ import assert_fingerprint as _assert_fingerprint + from .util.ssl_ import ( + create_urllib3_context, + is_ipaddress, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, + ) + from .util.ssl_match_hostname import CertificateError, match_hostname + from .util.url import Url + + # Not a no-op, we're adding this to the namespace so it can be imported. + ConnectionError = ConnectionError + BrokenPipeError = BrokenPipeError + + + log = logging.getLogger(__name__) + + port_by_scheme = {"http": 80, "https": 443} + + # When it comes time to update this value as a part of regular maintenance + # (i.e. test_recent_date is failing) update it to ~6 months before the current date. + RECENT_DATE = datetime.date(2023, 6, 1) + + _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") + + _HAS_SYS_AUDIT = hasattr(sys, "audit") + + + class HTTPConnection(_HTTPConnection): + """ + Based on :class:`http.client.HTTPConnection` but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: + + .. code-block:: python + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [ + (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + ] + + #: Whether this connection verifies the host's certificate. + is_verified: bool = False + + #: Whether this proxy connection verified the proxy host's certificate. + # If no proxy is currently connected to, the value will be ``None``.
+ proxy_is_verified: bool | None = None + + blocksize: int + source_address: tuple[str, int] | None + socket_options: connection._TYPE_SOCKET_OPTIONS | None + + _has_connected_to_proxy: bool + _response_options: _ResponseOptions | None + _tunnel_host: str | None + _tunnel_port: int | None + _tunnel_scheme: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None + | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + super().__init__( + host=host, + port=port, + timeout=Timeout.resolve_default_timeout(timeout), + source_address=source_address, + blocksize=blocksize, + ) + self.socket_options = socket_options + self.proxy = proxy + self.proxy_config = proxy_config + + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host: str | None = None + self._tunnel_port: int | None = None + self._tunnel_scheme: str | None = None + + @property + def host(self) -> str: + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value: str) -> None: + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self) -> socket.socket: + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + try: + sock = connection.create_connection( + (self._dns_host, self.port), + self.timeout, + source_address=self.source_address, + socket_options=self.socket_options, + ) + except socket.gaierror as e: + raise NameResolutionError(self.host, self, e) from e + except SocketTimeout as e: + raise ConnectTimeoutError( + self, + f"Connection to {self.host} timed out. 
(connect timeout={self.timeout})", + ) from e + + except OSError as e: + raise NewConnectionError( + self, f"Failed to establish a new connection: {e}" + ) from e + + # Audit hooks are only available in Python 3.8+ + if _HAS_SYS_AUDIT: + sys.audit("http.client.connect", self, self.host, self.port) + + return sock + + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + if scheme not in ("http", "https"): + raise ValueError( + f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'" + ) + super().set_tunnel(host, port=port, headers=headers) + self._tunnel_scheme = scheme + + def connect(self) -> None: + self.sock = self._new_conn() + if self._tunnel_host: + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() # type: ignore[attr-defined] + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + if self._has_connected_to_proxy: + self.proxy_is_verified = False + + @property + def is_closed(self) -> bool: + return self.sock is None + + @property + def is_connected(self) -> bool: + if self.sock is None: + return False + return not wait_for_read(self.sock, timeout=0.0) + + @property + def has_connected_to_proxy(self) -> bool: + return self._has_connected_to_proxy + + @property + def proxy_is_forwarding(self) -> bool: + """ + Return True if a forwarding proxy is configured, else return False + """ + return bool(self.proxy) and self._tunnel_host is None + + def close(self) -> None: + try: + super().close() + finally: + # Reset all stateful properties so connection + # can be re-used without leaking prior configs. + self.sock = None + self.is_verified = False + self.proxy_is_verified = None + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_scheme = None + + def putrequest( + self, + method: str, + url: str, + skip_host: bool = False, + skip_accept_encoding: bool = False, + ) -> None: + """""" + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})" + ) + + return super().putrequest( + method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding + ) + + def putheader(self, header: str, *values: str) -> None: # type: ignore[override] + """""" + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + super().putheader(header, *values) + elif to_str(header.lower()) not in SKIPPABLE_HEADERS: + skippable_headers = "', '".join( + [str.title(header) for header in sorted(SKIPPABLE_HEADERS)] + ) + raise ValueError( + f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'" + ) + + # `request` method's signature intentionally violates LSP. + # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental. 
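+ # An illustrative call shape (the values are hypothetical, shown only as a + # sketch of how the pool layer drives this method): + # conn.request("GET", "/", headers={"Accept": "*/*"}, preload_content=False) + # response = conn.getresponse()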
+ def request( # type: ignore[override] + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + *, + chunked: bool = False, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> None: + # Update the inner socket's timeout value to send the request. + # This only triggers if the connection is re-used. + if self.sock is not None: + self.sock.settimeout(self.timeout) + + # Store these values to be fed into the HTTPResponse + # object later. TODO: Remove this in favor of a real + # HTTP lifecycle mechanism. + + # We have to store these before we call .request() + # because sometimes we can still salvage a response + # off the wire even if we aren't able to completely + # send the request body. + self._response_options = _ResponseOptions( + request_method=method, + request_url=url, + preload_content=preload_content, + decode_content=decode_content, + enforce_content_length=enforce_content_length, + ) + + if headers is None: + headers = {} + header_keys = frozenset(to_str(k.lower()) for k in headers) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys + self.putrequest( + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host + ) + + # Transform the body into an iterable of sendall()-able chunks + # and detect if an explicit Content-Length is doable. + chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize) + chunks = chunks_and_cl.chunks + content_length = chunks_and_cl.content_length + + # When chunked is explicitly set to 'True' we respect that. + if chunked: + if "transfer-encoding" not in header_keys: + self.putheader("Transfer-Encoding", "chunked") + else: + # Detect whether a framing mechanism is already in use. If so, + # we respect that value; otherwise we pick chunked vs content-length + # depending on the type of 'body'. + if "content-length" in header_keys: + chunked = False + elif "transfer-encoding" in header_keys: + chunked = True + + # Otherwise we go off the recommendation of 'body_to_chunks()'. + else: + chunked = False + if content_length is None: + if chunks is not None: + chunked = True + self.putheader("Transfer-Encoding", "chunked") + else: + self.putheader("Content-Length", str(content_length)) + + # Now that framing headers are out of the way we send all the other headers. + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) + for header, value in headers.items(): + self.putheader(header, value) + self.endheaders() + + # If we're given a body we start sending that in chunks. + if chunks is not None: + for chunk in chunks: + # Sending empty chunks isn't allowed for TE: chunked + # as it indicates the end of the body. + if not chunk: + continue + if isinstance(chunk, str): + chunk = chunk.encode("utf-8") + if chunked: + self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk)) + else: + self.send(chunk) + + # Regardless of whether we have a body or not, if we're in + # chunked mode we want to send an explicit empty chunk.
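+ # A zero-length chunk (b"0\r\n\r\n") is the terminator defined by the chunked + # transfer coding (RFC 9112, Section 7.1); without it the peer would keep + # waiting for more body data.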
+ if chunked: + self.send(b"0\r\n\r\n") + + def request_chunked( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + ) -> None: + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block. + """ + warnings.warn( + "HTTPConnection.request_chunked() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).", + category=DeprecationWarning, + stacklevel=2, + ) + self.request(method, url, body=body, headers=headers, chunked=True) + + def getresponse( # type: ignore[override] + self, + ) -> HTTPResponse: + """ + Get the response from the server. + + If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable. + + If a request has not been sent or if a previous response has not been handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed. + """ + # Raise the same error as http.client.HTTPConnection + if self._response_options is None: + raise ResponseNotReady() + + # Reset this attribute for being used again. + resp_options = self._response_options + self._response_options = None + + # Since the connection's timeout value may have been updated + # we need to set the timeout on the socket. + self.sock.settimeout(self.timeout) + + # This is needed here to avoid circular import errors + from .response import HTTPResponse + + # Get the response from http.client.HTTPConnection + httplib_response = super().getresponse() + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: + log.warning( + "Failed to parse headers (url=%s): %s", + _url_from_connection(self, resp_options.request_url), + hpe, + exc_info=True, + ) + + headers = HTTPHeaderDict(httplib_response.msg.items()) + + response = HTTPResponse( + body=httplib_response, + headers=headers, + status=httplib_response.status, + version=httplib_response.version, + reason=httplib_response.reason, + preload_content=resp_options.preload_content, + decode_content=resp_options.decode_content, + original_response=httplib_response, + enforce_content_length=resp_options.enforce_content_length, + request_method=resp_options.request_method, + request_url=resp_options.request_url, + ) + return response + + + class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
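+ + An illustrative construction (the host and CA bundle path below are hypothetical): + + .. code-block:: python + + conn = HTTPSConnection( + "example.com", + 443, + cert_reqs="CERT_REQUIRED", + ca_certs="/path/to/ca-bundle.crt", + ) + conn.request("GET", "/")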
+ """ + + default_port = port_by_scheme["https"] # type: ignore[misc] + + cert_reqs: int | str | None = None + ca_certs: str | None = None + ca_cert_dir: str | None = None + ca_cert_data: None | str | bytes = None + ssl_version: int | str | None = None + ssl_minimum_version: int | None = None + ssl_maximum_version: int | None = None + assert_fingerprint: str | None = None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None + | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + super().__init__( + host, + port=port, + timeout=timeout, + source_address=source_address, + blocksize=blocksize, + socket_options=socket_options, + proxy=proxy, + proxy_config=proxy_config, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + # cert_reqs depends on ssl_context so calculate last. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + self.cert_reqs = cert_reqs + + def set_cert( + self, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ) -> None: + """ + This method should only be called once, before the connection is used. + """ + warnings.warn( + "HTTPSConnection.set_cert() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead provide the parameters to the " + "HTTPSConnection constructor.", + category=DeprecationWarning, + stacklevel=2, + ) + + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. 
+ if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self) -> None: + sock: socket.socket | ssl.SSLSocket + self.sock = sock = self._new_conn() + server_hostname: str = self.host + tls_in_tls = False + + # Do we need to establish a tunnel? + if self._tunnel_host is not None: + # We're tunneling to an HTTPS origin so need to do TLS-in-TLS. + if self._tunnel_scheme == "https": + # _connect_tls_proxy will verify and assign proxy_is_verified + self.sock = sock = self._connect_tls_proxy(self.host, sock) + tls_in_tls = True + elif self._tunnel_scheme == "http": + self.proxy_is_verified = False + + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + self._tunnel() # type: ignore[attr-defined] + # Override the host with the one we're requesting data from. + server_hostname = self._tunnel_host + + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + f"System time is way off (before {RECENT_DATE}). This will probably " + "lead to SSL verification errors" + ), + SystemTimeWarning, + ) + + # Remove trailing '.' from fqdn hostnames to allow certificate validation + server_hostname_rm_dot = server_hostname.rstrip(".") + + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock=sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + server_hostname=server_hostname_rm_dot, + ssl_context=self.ssl_context, + tls_in_tls=tls_in_tls, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + self.sock = sock_and_verified.socket + + # Forwarding proxies can never have a verified target since + # the proxy is the one doing the verification. Should instead + # use a CONNECT tunnel in order to verify the target. + # See: https://github.com/urllib3/urllib3/issues/3267. + if self.proxy_is_forwarding: + self.is_verified = False + else: + self.is_verified = sock_and_verified.is_verified + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + # Set `self.proxy_is_verified` unless it's already set while + # establishing a tunnel. + if self._has_connected_to_proxy and self.proxy_is_verified is None: + self.proxy_is_verified = sock_and_verified.is_verified + + def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket: + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + # `_connect_tls_proxy` is called when self._tunnel_host is truthy. 
+ proxy_config = typing.cast(ProxyConfig, self.proxy_config) + ssl_context = proxy_config.ssl_context + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + assert_hostname=proxy_config.assert_hostname, + assert_fingerprint=proxy_config.assert_fingerprint, + # Features that aren't implemented for proxies yet: + cert_file=None, + key_file=None, + key_password=None, + tls_in_tls=False, + ) + self.proxy_is_verified = sock_and_verified.is_verified + return sock_and_verified.socket # type: ignore[return-value] + + +class _WrappedAndVerifiedSocket(typing.NamedTuple): + """ + Wrapped socket and whether the connection is + verified after the TLS handshake + """ + + socket: ssl.SSLSocket | SSLTransport + is_verified: bool + + +def _ssl_wrap_socket_and_match_hostname( + sock: socket.socket, + *, + cert_reqs: None | str | int, + ssl_version: None | str | int, + ssl_minimum_version: int | None, + ssl_maximum_version: int | None, + cert_file: str | None, + key_file: str | None, + key_password: str | None, + ca_certs: str | None, + ca_cert_dir: str | None, + ca_cert_data: None | str | bytes, + assert_hostname: None | str | Literal[False], + assert_fingerprint: str | None, + server_hostname: str | None, + ssl_context: ssl.SSLContext | None, + tls_in_tls: bool = False, +) -> _WrappedAndVerifiedSocket: + """Logic for constructing an SSLContext from all TLS parameters, passing + that down into ssl_wrap_socket, and then doing certificate verification + either via hostname or fingerprint. This function exists to guarantee + that both proxies and targets have the same behavior when connecting via TLS. + """ + default_ssl_context = False + if ssl_context is None: + default_ssl_context = True + context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + ssl_minimum_version=ssl_minimum_version, + ssl_maximum_version=ssl_maximum_version, + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + else: + context = ssl_context + + context.verify_mode = resolve_cert_reqs(cert_reqs) + + # In some cases, we want to verify hostnames ourselves + if ( + # `ssl` can't verify fingerprints or alternate hostnames + assert_fingerprint + or assert_hostname + # assert_hostname can be set to False to disable hostname checking + or assert_hostname is False + # We still support OpenSSL 1.0.2, which prevents us from verifying + # hostnames easily: https://github.com/pyca/pyopenssl/pull/933 + or ssl_.IS_PYOPENSSL + or not ssl_.HAS_NEVER_CHECK_COMMON_NAME + ): + context.check_hostname = False + + # Try to load OS default certs if none are given. We need to do the hasattr() check + # for custom pyOpenSSL SSLContext objects because they don't support + # load_default_certs(). + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + # Ensure that IPv6 addresses are in the proper format and don't have a + # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses + # and interprets them as DNS hostnames. 
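+ # For example, a scoped link-local address such as "[fe80::1%eth0]" is + # reduced to "fe80::1" below so the ssl module treats it as an IP address + # rather than a DNS hostname.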
+ if server_hostname is not None: + normalized = server_hostname.strip("[]") + if "%" in normalized: + normalized = normalized[: normalized.rfind("%")] + if is_ipaddress(normalized): + server_hostname = normalized + + ssl_sock = ssl_wrap_socket( + sock=sock, + keyfile=key_file, + certfile=cert_file, + key_password=key_password, + ca_certs=ca_certs, + ca_cert_dir=ca_cert_dir, + ca_cert_data=ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + try: + if assert_fingerprint: + _assert_fingerprint( + ssl_sock.getpeercert(binary_form=True), assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not context.check_hostname + and assert_hostname is not False + ): + cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment] + + # Need to signal to our match_hostname whether to use 'commonName' or not. + # If we're using our own constructed SSLContext we explicitly set 'False' + # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name. + if default_ssl_context: + hostname_checks_common_name = False + else: + hostname_checks_common_name = ( + getattr(context, "hostname_checks_common_name", False) or False + ) + + _match_hostname( + cert, + assert_hostname or server_hostname, # type: ignore[arg-type] + hostname_checks_common_name, + ) + + return _WrappedAndVerifiedSocket( + socket=ssl_sock, + is_verified=context.verify_mode == ssl.CERT_REQUIRED + or bool(assert_fingerprint), + ) + except BaseException: + ssl_sock.close() + raise + + +def _match_hostname( + cert: _TYPE_PEER_CERT_RET_DICT | None, + asserted_hostname: str, + hostname_checks_common_name: bool = False, +) -> None: + # Our upstream implementation of ssl.match_hostname() + # only applies this normalization to IP addresses so it doesn't + # match DNS SANs so we do the same thing! + stripped_hostname = asserted_hostname.strip("[]") + if is_ipaddress(stripped_hostname): + asserted_hostname = stripped_hostname + + try: + match_hostname(cert, asserted_hostname, hostname_checks_common_name) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert # type: ignore[attr-defined] + raise + + +def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError: + # Look for the phrase 'wrong version number', if found + # then we should warn the user that we're very sure that + # this proxy is HTTP-only and they have a configuration issue. + error_normalized = " ".join(re.split("[^a-z]", str(err).lower())) + is_likely_http_proxy = ( + "wrong version number" in error_normalized + or "unknown protocol" in error_normalized + or "record layer failure" in error_normalized + ) + http_proxy_warning = ( + ". Your proxy appears to only use HTTP and not HTTPS, " + "try changing your proxy URL to be HTTP. 
See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#https-proxy-error-http-proxy" + ) + new_err = ProxyError( + f"Unable to connect to proxy" + f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}", + err, + ) + new_err.__cause__ = err + return new_err + + +def _get_default_user_agent() -> str: + return f"python-urllib3/{__version__}" + + +class DummyConnection: + """Used to detect a failed ConnectionCls import.""" + + +if not ssl: + HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection + + +def _url_from_connection( + conn: HTTPConnection | HTTPSConnection, path: str | None = None +) -> str: + """Returns the URL from a given connection. This is mainly used for testing and logging.""" + + scheme = "https" if isinstance(conn, HTTPSConnection) else "http" + + return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/connectionpool.py b/env-llmeval/lib/python3.10/site-packages/urllib3/connectionpool.py new file mode 100644 index 0000000000000000000000000000000000000000..bd58ff14dd4936288a6b3bc4d9303858a08c1eb8 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/connectionpool.py @@ -0,0 +1,1186 @@ +from __future__ import annotations + +import errno +import logging +import queue +import sys +import typing +import warnings +import weakref +from socket import timeout as SocketTimeout +from types import TracebackType + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._request_methods import RequestMethods +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + ProxyConfig, + _wrap_proxy_error, +) +from .connection import port_by_scheme as port_by_scheme +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + FullPoolError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .response import BaseHTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.request import _TYPE_BODY_POSITION, set_file_position +from .util.retry import Retry +from .util.ssl_match_hostname import CertificateError +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import parse_url +from .util.util import to_str + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal + + from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection + +log = logging.getLogger(__name__) + +_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None] + +_SelfT = typing.TypeVar("_SelfT") + + +# Pool objects +class ConnectionPool: + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. 
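+ + A short usage sketch via a concrete subclass (the host is hypothetical): + + .. code-block:: python + + with HTTPConnectionPool("example.com") as pool: + response = pool.request("GET", "/") + # close() is called automatically when the 'with' block exits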
+ """ + + scheme: str | None = None + QueueCls = queue.LifoQueue + + def __init__(self, host: str, port: int | None = None) -> None: + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self.port = port + + # This property uses 'normalize_host()' (not '_normalize_host()') + # to avoid removing square braces around IPv6 addresses. + # This value is sent to `HTTPConnection.set_tunnel()` if called + # because square braces are required for HTTP CONNECT tunneling. + self._tunnel_host = normalize_host(host, scheme=self.scheme).lower() + + def __str__(self) -> str: + return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})" + + def __enter__(self: _SelfT) -> _SelfT: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> Literal[False]: + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self) -> None: + """ + Close all pooled connections and disable the pool. + """ + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. 
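+ + For example, an illustrative pool using a few of the parameters above (all values hypothetical): + + .. code-block:: python + + pool = HTTPConnectionPool( + "example.com", + port=80, + maxsize=4, + block=True, + retries=3, + ) + response = pool.request("GET", "/index.html")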
+ """ + + scheme = "http" + ConnectionCls: ( + type[BaseHTTPConnection] | type[BaseHTTPSConnection] + ) = HTTPConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + _proxy_config: ProxyConfig | None = None, + **conn_kw: typing.Any, + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in range(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + # Do not pass 'self' as a callback to 'finalize': that reference would + # keep self alive forever (a leak). Passing only a reference to the pool + # lets the garbage collector free self once nobody else references it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref.finalize(self, _close_pool_connections, pool) + + def _new_conn(self) -> BaseHTTPConnection: + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + **self.conn_kw, + ) + return conn + + def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``.
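+ + For example (illustrative): with ``block=True`` and ``maxsize=1``, a second + concurrent caller waits up to ``timeout`` seconds for the first connection to + be returned before :class:`urllib3.exceptions.EmptyPoolError` is raised.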
+ """ + conn = None + + if self.pool is None: + raise ClosedPoolError(self, "Pool is closed.") + + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool is empty and a new connection can't be opened due to blocking mode.", + ) from None + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + + return conn or self._new_conn() + + def _put_conn(self, conn: BaseHTTPConnection | None) -> None: + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + if self.pool is not None: + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + if self.block: + # This should never happen if you got the conn from self._get_conn + raise FullPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) from None + + log.warning( + "Connection pool is full, discarding connection: %s. Connection pool size: %s", + self.host, + self.pool.qsize(), + ) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + + def _prepare_proxy(self, conn: BaseHTTPConnection) -> None: + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: + """Helper that always returns a :class:`urllib3.util.Timeout`""" + if timeout is _DEFAULT_TIMEOUT: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout( + self, + err: BaseSSLError | OSError | SocketTimeout, + url: str, + timeout_value: _TYPE_TIMEOUT | None, + ) -> None: + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, f"Read timed out. (read timeout={timeout_value})" + ) from err + + # See the above comment about EAGAIN in Python 3. + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, f"Read timed out. 
(read timeout={timeout_value})" + ) from err + + def _make_request( + self, + conn: BaseHTTPConnection, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | None = None, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + chunked: bool = False, + response_conn: BaseHTTPConnection | None = None, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> BaseHTTPResponse: + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param response_conn: + Set this to ``None`` if you will handle releasing the connection or + set the connection to have the response release it. + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) + + try: + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # _validate_conn() starts the connection to an HTTPS proxy + # so we need to wrap errors with 'ProxyError' here too. 
+        except (
+            OSError,
+            NewConnectionError,
+            TimeoutError,
+            BaseSSLError,
+            CertificateError,
+            SSLError,
+        ) as e:
+            new_e: Exception = e
+            if isinstance(e, (BaseSSLError, CertificateError)):
+                new_e = SSLError(e)
+            # If the connection didn't successfully connect to its proxy,
+            # the error is attributable to the proxy itself, so wrap it
+            # in a ProxyError before re-raising.
+            if isinstance(
+                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
+            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
+                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
+            raise new_e
+
+        # conn.request() calls http.client.*.request, not the method in
+        # urllib3.request. It also calls makefile (recv) on the socket.
+        try:
+            conn.request(
+                method,
+                url,
+                body=body,
+                headers=headers,
+                chunked=chunked,
+                preload_content=preload_content,
+                decode_content=decode_content,
+                enforce_content_length=enforce_content_length,
+            )
+
+        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+        # legitimately able to close the connection after sending a valid response.
+        # With this behaviour, the received response is still readable.
+        except BrokenPipeError:
+            pass
+        except OSError as e:
+            # macOS/Linux
+            # EPROTOTYPE and ECONNRESET are needed on macOS
+            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+            # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
+            if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
+                raise
+
+        # Reset the timeout for the recv() on the socket
+        read_timeout = timeout_obj.read_timeout
+
+        if not conn.is_closed:
+            # In Python 3 socket.py will catch EAGAIN and return None when you
+            # try and read into the file pointer created by http.client, which
+            # instead raises a BadStatusLine exception. Instead of catching
+            # the exception and assuming all BadStatusLine exceptions are read
+            # timeouts, check for a zero timeout before making the request.
+            if read_timeout == 0:
+                raise ReadTimeoutError(
+                    self, url, f"Read timed out. (read timeout={read_timeout})"
+                )
+            conn.timeout = read_timeout
+
+        # Receive the response from the server
+        try:
+            response = conn.getresponse()
+        except (BaseSSLError, OSError) as e:
+            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+            raise
+
+        # Set properties that are used by the pooling layer.
+        response.retries = retries
+        response._connection = response_conn  # type: ignore[attr-defined]
+        response._pool = self  # type: ignore[attr-defined]
+
+        # emscripten connection doesn't have _http_vsn_str
+        http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+        log.debug(
+            '%s://%s:%s "%s %s %s" %s %s',
+            self.scheme,
+            self.host,
+            self.port,
+            method,
+            url,
+            # HTTP version
+            http_version,
+            response.status,
+            response.length_remaining,
+        )
+
+        return response
+
+    def close(self) -> None:
+        """
+        Close all pooled connections and disable the pool.
+        """
+        if self.pool is None:
+            return
+        # Disable access to the pool
+        old_pool, self.pool = self.pool, None
+
+        # Close all the HTTPConnections in the pool.
+        _close_pool_connections(old_pool)
+
+    def is_same_host(self, url: str) -> bool:
+        """
+        Check if the given ``url`` is a member of the same host as this
+        connection pool.
+        """
+        if url.startswith("/"):
+            return True
+
+        # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( # type: ignore[override] + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + redirect: bool = True, + assert_same_host: bool = True, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + pool_timeout: int | None = None, + release_conn: bool | None = None, + chunked: bool = False, + body_pos: _TYPE_BODY_POSITION | None = None, + preload_content: bool = True, + decode_content: bool = True, + **response_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method + such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. 
+ + :param bool preload_content: + If True, the response's body will be preloaded into memory. + + :param bool decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of ``preload_content`` + which defaults to ``True``. + + :param bool chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + """ + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = preload_content + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = to_str(_encode_target(url)) + else: + url = to_str(parsed_url.url) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. + if not http_tunnel_required: + headers = headers.copy() # type: ignore[attr-defined] + headers.update(self.proxy_headers) # type: ignore[union-attr] + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment] + + # Is this a closed/new connection that requires CONNECT tunnelling? 
+ if self.proxy is not None and http_tunnel_required and conn.is_closed: + try: + self._prepare_proxy(conn) + except (BaseSSLError, OSError, SocketTimeout) as e: + self._raise_timeout( + err=e, url=self.proxy.url, timeout_value=conn.timeout + ) + raise + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Make the request on the HTTPConnection object + response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + retries=retries, + response_conn=response_conn, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + OSError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ProxyError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + new_e: Exception = e + if isinstance(e, (BaseSSLError, CertificateError)): + new_e = SSLError(e) + if isinstance( + new_e, + ( + OSError, + NewConnectionError, + TimeoutError, + SSLError, + HTTPException, + ), + ) and (conn and conn.proxy and not conn.has_connected_to_proxy): + new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) + elif isinstance(new_e, (OSError, HTTPException)): + new_e = ProtocolError("Connection aborted.", new_e) + + retries = retries.increment( + method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + if conn: + conn.close() + conn = None + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. + method = "GET" + # And lose the body not to transfer anything sensitive. 
+ body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.headers.get("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. 
+ """ + + scheme = "https" + ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + ssl_version: int | str | None = None, + ssl_minimum_version: ssl.TLSVersion | None = None, + ssl_maximum_version: ssl.TLSVersion | None = None, + assert_hostname: str | Literal[False] | None = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + **conn_kw: typing.Any, + ) -> None: + super().__init__( + host, + port, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override] + """Establishes a tunnel connection through HTTP CONNECT.""" + if self.proxy and self.proxy.scheme == "https": + tunnel_scheme = "https" + else: + tunnel_scheme = "http" + + conn.set_tunnel( + scheme=tunnel_scheme, + host=self._tunnel_host, + port=self.port, + headers=self.proxy_headers, + ) + conn.connect() + + def _new_conn(self) -> BaseHTTPSConnection: + """ + Return a fresh :class:`urllib3.connection.HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap] + raise ImportError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host: str = self.host + actual_port = self.port + if self.proxy is not None and self.proxy.host is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + return self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + **self.conn_kw, + ) + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + super()._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if conn.is_closed: + conn.connect() + + # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791 + if not conn.is_verified and not conn.proxy_is_verified: + warnings.warn( + ( + f"Unverified HTTPS request is being made to host '{conn.host}'. 
" + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#tls-warnings" + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool: + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + else: + return HTTPConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + + +@typing.overload +def _normalize_host(host: None, scheme: str | None) -> None: + ... + + +@typing.overload +def _normalize_host(host: str, scheme: str | None) -> str: + ... + + +def _normalize_host(host: str | None, scheme: str | None) -> str | None: + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + if host and host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host + + +def _url_from_pool( + pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None +) -> str: + """Returns the URL from a given connection pool. This is mainly used for testing and logging.""" + return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url + + +def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. 
diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/exceptions.py b/env-llmeval/lib/python3.10/site-packages/urllib3/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..b0792f00fd1b67a6a974e209ea0ca8c0ff4fae91 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/exceptions.py @@ -0,0 +1,321 @@ +from __future__ import annotations + +import socket +import typing +import warnings +from email.errors import MessageDefect +from http.client import IncompleteRead as httplib_IncompleteRead + +if typing.TYPE_CHECKING: + from .connection import HTTPConnection + from .connectionpool import ConnectionPool + from .response import HTTPResponse + from .util.retry import Retry + +# Base Exceptions + + +class HTTPError(Exception): + """Base exception used by this module.""" + + +class HTTPWarning(Warning): + """Base warning used by this module.""" + + +_TYPE_REDUCE_RESULT = typing.Tuple[ + typing.Callable[..., object], typing.Tuple[object, ...] +] + + +class PoolError(HTTPError): + """Base exception for errors caused within a pool.""" + + def __init__(self, pool: ConnectionPool, message: str) -> None: + self.pool = pool + super().__init__(f"{pool}: {message}") + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + """Base exception for PoolErrors that have associated URLs.""" + + def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: + self.url = url + super().__init__(pool, message) + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self.url, None) + + +class SSLError(HTTPError): + """Raised when SSL certificate fails in an HTTPS connection.""" + + +class ProxyError(HTTPError): + """Raised when the connection to a proxy fails.""" + + # The original error is also available as __cause__. + original_error: Exception + + def __init__(self, message: str, error: Exception) -> None: + super().__init__(message, error) + self.original_error = error + + +class DecodeError(HTTPError): + """Raised when automatic decoding based on Content-Type fails.""" + + +class ProtocolError(HTTPError): + """Raised when something unexpected happens mid-request/response.""" + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + +# Leaf Exceptions + + +class MaxRetryError(RequestError): + """Raised when the maximum number of retries is exceeded. + + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param str url: The requested Url + :param reason: The underlying error + :type reason: :class:`Exception` + + """ + + def __init__( + self, pool: ConnectionPool, url: str, reason: Exception | None = None + ) -> None: + self.reason = reason + + message = f"Max retries exceeded with url: {url} (Caused by {reason!r})" + + super().__init__(pool, url, message) + + +class HostChangedError(RequestError): + """Raised when an existing pool gets a request for a foreign host.""" + + def __init__( + self, pool: ConnectionPool, url: str, retries: Retry | int = 3 + ) -> None: + message = f"Tried to open a foreign host with url: {url}" + super().__init__(pool, url, message) + self.retries = retries + + +class TimeoutStateError(HTTPError): + """Raised when passing an invalid state to a timeout""" + + +class TimeoutError(HTTPError): + """Raised when a socket timeout error occurs. 
+
+    Catching this error will catch both :exc:`ReadTimeoutErrors
+    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+    """
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+    """Raised when a socket timeout occurs while connecting to a server"""
+
+
+class NewConnectionError(ConnectTimeoutError, HTTPError):
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+    def __init__(self, conn: HTTPConnection, message: str) -> None:
+        self.conn = conn
+        super().__init__(f"{conn}: {message}")
+
+    @property
+    def pool(self) -> HTTPConnection:
+        warnings.warn(
+            "The 'pool' property is deprecated and will be removed "
+            "in urllib3 v2.1.0. Use 'conn' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        return self.conn
+
+
+class NameResolutionError(NewConnectionError):
+    """Raised when host name resolution fails."""
+
+    def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror):
+        message = f"Failed to resolve '{host}' ({reason})"
+        super().__init__(conn, message)
+
+
+class EmptyPoolError(PoolError):
+    """Raised when a pool runs out of connections and no more are allowed."""
+
+
+class FullPoolError(PoolError):
+    """Raised when we try to add a connection to a full pool in blocking mode."""
+
+
+class ClosedPoolError(PoolError):
+    """Raised when a request enters a pool after the pool has been closed."""
+
+
+class LocationValueError(ValueError, HTTPError):
+    """Raised when there is something wrong with a given URL input."""
+
+
+class LocationParseError(LocationValueError):
+    """Raised when get_host or similar fails to parse the URL input."""
+
+    def __init__(self, location: str) -> None:
+        message = f"Failed to parse: {location}"
+        super().__init__(message)
+
+        self.location = location
+
+
+class URLSchemeUnknown(LocationValueError):
+    """Raised when a URL input has an unsupported scheme."""
+
+    def __init__(self, scheme: str):
+        message = f"Not supported URL scheme {scheme}"
+        super().__init__(message)
+
+        self.scheme = scheme
+
+
+class ResponseError(HTTPError):
+    """Used as a container for an error reason supplied in a MaxRetryError."""
+
+    GENERIC_ERROR = "too many error responses"
+    SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+    """Warned when performing security reducing actions"""
+
+
+class InsecureRequestWarning(SecurityWarning):
+    """Warned when making an unverified HTTPS request."""
+
+
+class NotOpenSSLWarning(SecurityWarning):
+    """Warned when using unsupported SSL library"""
+
+
+class SystemTimeWarning(SecurityWarning):
+    """Warned when system time is suspected to be wrong"""
+
+
+class InsecurePlatformWarning(SecurityWarning):
+    """Warned when certain TLS/SSL configuration is not available on a platform."""
+
+
+class DependencyWarning(HTTPWarning):
+    """
+    Warned when an attempt is made to import a module with missing optional
+    dependencies.
+    """
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+    """Response needs to be chunked in order to read it as chunks."""
+
+
+class BodyNotHttplibCompatible(HTTPError):
+    """
+    Body should be :class:`http.client.HTTPResponse` like
+    (have an fp attribute which returns raw chunks) for read_chunked().
+ """ + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. + """ + + partial: int # type: ignore[assignment] + expected: int + + def __init__(self, partial: int, expected: int) -> None: + self.partial = partial + self.expected = expected + + def __repr__(self) -> str: + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) + + +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response: HTTPResponse, length: bytes) -> None: + self.partial: int = response.tell() # type: ignore[assignment] + self.expected: int | None = response.length_remaining + self.response = response + self.length = length + + def __repr__(self) -> str: + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + +class InvalidHeader(HTTPError): + """The header provided was somehow invalid.""" + + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme: str | None) -> None: + # 'localhost' is here because our URL parser parses + # localhost:8080 -> scheme=localhost, remove if we fix this. + if scheme == "localhost": + scheme = None + if scheme is None: + message = "Proxy URL had no scheme, should start with http:// or https://" + else: + message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://" + super().__init__(message) + + +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + +class HeaderParsingError(HTTPError): + """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" + + def __init__( + self, defects: list[MessageDefect], unparsed_data: bytes | str | None + ) -> None: + message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}" + super().__init__(message) + + +class UnrewindableBodyError(HTTPError): + """urllib3 encountered an error when trying to rewind a body""" diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/fields.py b/env-llmeval/lib/python3.10/site-packages/urllib3/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..3e258a5d8ba0fe849217a0cf1f52df61a461e32a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/fields.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import email.utils +import mimetypes +import typing + +_TYPE_FIELD_VALUE = typing.Union[str, bytes] +_TYPE_FIELD_VALUE_TUPLE = typing.Union[ + _TYPE_FIELD_VALUE, + typing.Tuple[str, _TYPE_FIELD_VALUE], + typing.Tuple[str, _TYPE_FIELD_VALUE, str], +] + + +def guess_content_type( + filename: str | None, default: str = "application/octet-stream" +) -> str: + """ + Guess the "Content-Type" of a file. + + :param filename: + The filename to guess the "Content-Type" of using :mod:`mimetypes`. + :param default: + If no "Content-Type" can be guessed, default to `default`. 
+ """ + if filename: + return mimetypes.guess_type(filename)[0] or default + return default + + +def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as ``bytes`` or `str``. + :returns: + An RFC-2231-formatted unicode string. + + .. deprecated:: 2.0.0 + Will be removed in urllib3 v2.1.0. This is not valid for + ``multipart/form-data`` header parameters. + """ + import warnings + + warnings.warn( + "'format_header_param_rfc2231' is deprecated and will be " + "removed in urllib3 v2.1.0. This is not valid for " + "multipart/form-data header parameters.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(value, bytes): + value = value.decode("utf-8") + + if not any(ch in value for ch in '"\\\r\n'): + result = f'{name}="{value}"' + try: + result.encode("ascii") + except (UnicodeEncodeError, UnicodeDecodeError): + pass + else: + return result + + value = email.utils.encode_rfc2231(value, "utf-8") + value = f"{name}*={value}" + + return value + + +def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Format and quote a single multipart header parameter. + + This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching + the behavior of current browser and curl versions. Values are + assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are + percent encoded. + + .. _WHATWG HTML Standard: + https://html.spec.whatwg.org/multipage/ + form-control-infrastructure.html#multipart-form-data + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + :returns: + A string ``name="value"`` with the escaped value. + + .. versionchanged:: 2.0.0 + Matches the WHATWG HTML Standard as of 2021/06/10. Control + characters are no longer percent encoded. + + .. versionchanged:: 2.0.0 + Renamed from ``format_header_param_html5`` and + ``format_header_param``. The old names will be removed in + urllib3 v2.1.0. + """ + if isinstance(value, bytes): + value = value.decode("utf-8") + + # percent encode \n \r " + value = value.translate({10: "%0A", 13: "%0D", 34: "%22"}) + return f'{name}="{value}"' + + +def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param_html5' has been renamed to " + "'format_multipart_header_param'. The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param' has been renamed to " + "'format_multipart_header_param'. 
The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +class RequestField: + """ + A data container for request body parameters. + + :param name: + The name of this request field. Must be unicode. + :param data: + The data/value body. + :param filename: + An optional filename of the request field. Must be unicode. + :param headers: + An optional dict-like object of headers to initially use for the field. + + .. versionchanged:: 2.0.0 + The ``header_formatter`` parameter is deprecated and will + be removed in urllib3 v2.1.0. + """ + + def __init__( + self, + name: str, + data: _TYPE_FIELD_VALUE, + filename: str | None = None, + headers: typing.Mapping[str, str] | None = None, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ): + self._name = name + self._filename = filename + self.data = data + self.headers: dict[str, str | None] = {} + if headers: + self.headers = dict(headers) + + if header_formatter is not None: + import warnings + + warnings.warn( + "The 'header_formatter' parameter is deprecated and " + "will be removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + self.header_formatter = header_formatter + else: + self.header_formatter = format_multipart_header_param + + @classmethod + def from_tuples( + cls, + fieldname: str, + value: _TYPE_FIELD_VALUE_TUPLE, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ) -> RequestField: + """ + A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. + + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. + For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + filename: str | None + content_type: str | None + data: _TYPE_FIELD_VALUE + + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Override this method to change how each multipart header + parameter is formatted. By default, this calls + :func:`format_multipart_header_param`. + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + + :meta public: + """ + return self.header_formatter(name, value) + + def _render_parts( + self, + header_parts: ( + dict[str, _TYPE_FIELD_VALUE | None] + | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]] + ), + ) -> str: + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. 
+ """ + iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]] + + parts = [] + if isinstance(header_parts, dict): + iterable = header_parts.items() + else: + iterable = header_parts + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return "; ".join(parts) + + def render_headers(self) -> str: + """ + Renders the headers for this request field. + """ + lines = [] + + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append(f"{sort_key}: {self.headers[sort_key]}") + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append(f"{header_name}: {header_value}") + + lines.append("\r\n") + return "\r\n".join(lines) + + def make_multipart( + self, + content_disposition: str | None = None, + content_type: str | None = None, + content_location: str | None = None, + ) -> None: + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_disposition: + The 'Content-Disposition' of the request body. Defaults to 'form-data' + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. + + """ + content_disposition = (content_disposition or "form-data") + "; ".join( + [ + "", + self._render_parts( + (("name", self._name), ("filename", self._filename)) + ), + ] + ) + + self.headers["Content-Disposition"] = content_disposition + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/filepost.py b/env-llmeval/lib/python3.10/site-packages/urllib3/filepost.py new file mode 100644 index 0000000000000000000000000000000000000000..1c90a211fbc4337b08734db0d2fd3de0f4eb0e21 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/filepost.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import binascii +import codecs +import os +import typing +from io import BytesIO + +from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField + +writer = codecs.lookup("utf-8")[3] + +_TYPE_FIELDS_SEQUENCE = typing.Sequence[ + typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] +] +_TYPE_FIELDS = typing.Union[ + _TYPE_FIELDS_SEQUENCE, + typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE], +] + + +def choose_boundary() -> str: + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + return binascii.hexlify(os.urandom(16)).decode() + + +def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]: + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]] + + if isinstance(fields, typing.Mapping): + iterable = fields.items() + else: + iterable = fields + + for field in iterable: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def encode_multipart_formdata( + fields: _TYPE_FIELDS, boundary: str | None = None +) -> tuple[bytes, str]: + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 
+ + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + Values are processed by :func:`urllib3.fields.RequestField.from_tuples`. + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(f"--{boundary}\r\n".encode("latin-1")) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, str): + writer(body).write(data) + else: + body.write(data) + + body.write(b"\r\n") + + body.write(f"--{boundary}--\r\n".encode("latin-1")) + + content_type = f"multipart/form-data; boundary={boundary}" + + return body.getvalue(), content_type diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/http2.py b/env-llmeval/lib/python3.10/site-packages/urllib3/http2.py new file mode 100644 index 0000000000000000000000000000000000000000..15fa9d9157e7a1c075fec33e5bea49b44e1f7e0d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/http2.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +import threading +import types +import typing + +import h2.config # type: ignore[import-untyped] +import h2.connection # type: ignore[import-untyped] +import h2.events # type: ignore[import-untyped] + +import urllib3.connection +import urllib3.util.ssl_ +from urllib3.response import BaseHTTPResponse + +from ._collections import HTTPHeaderDict +from .connection import HTTPSConnection +from .connectionpool import HTTPSConnectionPool + +orig_HTTPSConnection = HTTPSConnection + +T = typing.TypeVar("T") + + +class _LockedObject(typing.Generic[T]): + """ + A wrapper class that hides a specific object behind a lock. + + The goal here is to provide a simple way to protect access to an object + that cannot safely be simultaneously accessed from multiple threads. The + intended use of this class is simple: take hold of it with a context + manager, which returns the protected object. 
+ """ + + def __init__(self, obj: T): + self.lock = threading.RLock() + self._obj = obj + + def __enter__(self) -> T: + self.lock.acquire() + return self._obj + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + self.lock.release() + + +class HTTP2Connection(HTTPSConnection): + def __init__( + self, host: str, port: int | None = None, **kwargs: typing.Any + ) -> None: + self._h2_conn = self._new_h2_conn() + self._h2_stream: int | None = None + self._h2_headers: list[tuple[bytes, bytes]] = [] + + if "proxy" in kwargs or "proxy_config" in kwargs: # Defensive: + raise NotImplementedError("Proxies aren't supported with HTTP/2") + + super().__init__(host, port, **kwargs) + + def _new_h2_conn(self) -> _LockedObject[h2.connection.H2Connection]: + config = h2.config.H2Configuration(client_side=True) + return _LockedObject(h2.connection.H2Connection(config=config)) + + def connect(self) -> None: + super().connect() + + with self._h2_conn as h2_conn: + h2_conn.initiate_connection() + self.sock.sendall(h2_conn.data_to_send()) + + def putrequest( + self, + method: str, + url: str, + skip_host: bool = False, + skip_accept_encoding: bool = False, + ) -> None: + with self._h2_conn as h2_conn: + self._request_url = url + self._h2_stream = h2_conn.get_next_available_stream_id() + + if ":" in self.host: + authority = f"[{self.host}]:{self.port or 443}" + else: + authority = f"{self.host}:{self.port or 443}" + + self._h2_headers.extend( + ( + (b":scheme", b"https"), + (b":method", method.encode()), + (b":authority", authority.encode()), + (b":path", url.encode()), + ) + ) + + def putheader(self, header: str, *values: str) -> None: # type: ignore[override] + for value in values: + self._h2_headers.append( + (header.encode("utf-8").lower(), value.encode("utf-8")) + ) + + def endheaders(self) -> None: # type: ignore[override] + with self._h2_conn as h2_conn: + h2_conn.send_headers( + stream_id=self._h2_stream, + headers=self._h2_headers, + end_stream=True, + ) + if data_to_send := h2_conn.data_to_send(): + self.sock.sendall(data_to_send) + + def send(self, data: bytes) -> None: # type: ignore[override] # Defensive: + if not data: + return + raise NotImplementedError("Sending data isn't supported yet") + + def getresponse( # type: ignore[override] + self, + ) -> HTTP2Response: + status = None + data = bytearray() + with self._h2_conn as h2_conn: + end_stream = False + while not end_stream: + # TODO: Arbitrary read value. + if received_data := self.sock.recv(65535): + events = h2_conn.receive_data(received_data) + for event in events: + if isinstance(event, h2.events.ResponseReceived): + headers = HTTPHeaderDict() + for header, value in event.headers: + if header == b":status": + status = int(value.decode()) + else: + headers.add( + header.decode("ascii"), value.decode("ascii") + ) + + elif isinstance(event, h2.events.DataReceived): + data += event.data + h2_conn.acknowledge_received_data( + event.flow_controlled_length, event.stream_id + ) + + elif isinstance(event, h2.events.StreamEnded): + end_stream = True + + if data_to_send := h2_conn.data_to_send(): + self.sock.sendall(data_to_send) + + # We always close to not have to handle connection management. 
+ self.close() + + assert status is not None + return HTTP2Response( + status=status, + headers=headers, + request_url=self._request_url, + data=bytes(data), + ) + + def close(self) -> None: + with self._h2_conn as h2_conn: + try: + h2_conn.close_connection() + if data := h2_conn.data_to_send(): + self.sock.sendall(data) + except Exception: + pass + + # Reset all our HTTP/2 connection state. + self._h2_conn = self._new_h2_conn() + self._h2_stream = None + self._h2_headers = [] + + super().close() + + +class HTTP2Response(BaseHTTPResponse): + # TODO: This is a woefully incomplete response object, but works for non-streaming. + def __init__( + self, + status: int, + headers: HTTPHeaderDict, + request_url: str, + data: bytes, + decode_content: bool = False, # TODO: support decoding + ) -> None: + super().__init__( + status=status, + headers=headers, + # Following CPython, we map HTTP versions to major * 10 + minor integers + version=20, + # No reason phrase in HTTP/2 + reason=None, + decode_content=decode_content, + request_url=request_url, + ) + self._data = data + self.length_remaining = 0 + + @property + def data(self) -> bytes: + return self._data + + def get_redirect_location(self) -> None: + return None + + def close(self) -> None: + pass + + +def inject_into_urllib3() -> None: + HTTPSConnectionPool.ConnectionCls = HTTP2Connection + urllib3.connection.HTTPSConnection = HTTP2Connection # type: ignore[misc] + + # TODO: Offer 'http/1.1' as well, but for testing purposes this is handy. + urllib3.util.ssl_.ALPN_PROTOCOLS = ["h2"] + + +def extract_from_urllib3() -> None: + HTTPSConnectionPool.ConnectionCls = orig_HTTPSConnection + urllib3.connection.HTTPSConnection = orig_HTTPSConnection # type: ignore[misc] + + urllib3.util.ssl_.ALPN_PROTOCOLS = ["http/1.1"] diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/poolmanager.py b/env-llmeval/lib/python3.10/site-packages/urllib3/poolmanager.py new file mode 100644 index 0000000000000000000000000000000000000000..32da0a00ab7b6eb0c077926e508cdab6ca18b8e2 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/poolmanager.py @@ -0,0 +1,638 @@ +from __future__ import annotations + +import functools +import logging +import typing +import warnings +from types import TracebackType +from urllib.parse import urljoin + +from ._collections import HTTPHeaderDict, RecentlyUsedContainer +from ._request_methods import RequestMethods +from .connection import ProxyConfig +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + URLSchemeUnknown, +) +from .response import BaseHTTPResponse +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import Url, parse_url + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ca_cert_data", + "ssl_version", + "ssl_minimum_version", + "ssl_maximum_version", + "ca_cert_dir", + "ssl_context", + "key_password", + "server_hostname", +) +# Default value for `blocksize` - a new parameter introduced to +# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 +_DEFAULT_BLOCKSIZE = 16384 + +_SelfT = typing.TypeVar("_SelfT") + + +class 
PoolKey(typing.NamedTuple): + """ + All known keyword arguments that could be provided to the pool manager, its + pools, or the underlying connections. + + All custom key schemes should include the fields in this key at a minimum. + """ + + key_scheme: str + key_host: str + key_port: int | None + key_timeout: Timeout | float | int | None + key_retries: Retry | bool | int | None + key_block: bool | None + key_source_address: tuple[str, int] | None + key_key_file: str | None + key_key_password: str | None + key_cert_file: str | None + key_cert_reqs: str | None + key_ca_certs: str | None + key_ca_cert_data: str | bytes | None + key_ssl_version: int | str | None + key_ssl_minimum_version: ssl.TLSVersion | None + key_ssl_maximum_version: ssl.TLSVersion | None + key_ca_cert_dir: str | None + key_ssl_context: ssl.SSLContext | None + key_maxsize: int | None + key_headers: frozenset[tuple[str, str]] | None + key__proxy: Url | None + key__proxy_headers: frozenset[tuple[str, str]] | None + key__proxy_config: ProxyConfig | None + key_socket_options: _TYPE_SOCKET_OPTIONS | None + key__socks_options: frozenset[tuple[str, str]] | None + key_assert_hostname: bool | str | None + key_assert_fingerprint: str | None + key_server_hostname: str | None + key_blocksize: int | None + + +def _default_key_normalizer( + key_class: type[PoolKey], request_context: dict[str, typing.Any] +) -> PoolKey: + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. + :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ("headers", "_proxy_headers", "_socks_options"): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get("socket_options") + if socket_opts is not None: + context["socket_options"] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context["key_" + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context + if context.get("key_blocksize") is None: + context["key_blocksize"] = _DEFAULT_BLOCKSIZE + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. 
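+# Editorial sketch, not part of the upstream source: how a request context
+# becomes a hashable PoolKey via _default_key_normalizer above:
+#
+#     context = {"scheme": "HTTPS", "host": "Example.COM", "port": 443,
+#                "headers": {"X-Token": "abc"}}
+#     key = _default_key_normalizer(PoolKey, context)
+#     assert key.key_scheme == "https"          # scheme lower-cased
+#     assert key.key_host == "example.com"      # host lower-cased
+#     assert key.key_headers == frozenset({("X-Token", "abc")})
+#     assert key.key_blocksize == _DEFAULT_BLOCKSIZE  # defaulted when absent
+#     assert key.key_retries is None            # missing fields become None
+#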
+#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. +key_fn_by_scheme = { + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example: + + .. code-block:: python + + import urllib3 + + http = urllib3.PoolManager(num_pools=2) + + resp1 = http.request("GET", "https://google.com/") + resp2 = http.request("GET", "https://google.com/mail") + resp3 = http.request("GET", "https://yahoo.com/") + + print(len(http.pools)) + # 2 + + """ + + proxy: Url | None = None + proxy_config: ProxyConfig | None = None + + def __init__( + self, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + super().__init__(headers) + self.connection_pool_kw = connection_pool_kw + + self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool] + self.pools = RecentlyUsedContainer(num_pools) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self: _SelfT) -> _SelfT: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> Literal[False]: + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool( + self, + scheme: str, + host: str, + port: int, + request_context: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly + # set to 'None' in the request_context. + if request_context.get("blocksize") is None: + request_context["blocksize"] = _DEFAULT_BLOCKSIZE + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. 
+ for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self) -> None: + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context( + self, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + if "strict" in request_context: + warnings.warn( + "The 'strict' parameter is no longer needed on Python 3+. " + "This will raise an error in urllib3 v2.1.0.", + DeprecationWarning, + ) + request_context.pop("strict") + + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key( + self, pool_key: PoolKey, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url( + self, url: str, pool_kwargs: dict[str, typing.Any] | None = None + ) -> HTTPConnectionPool: + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. 
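+        For instance (an editorial example, not part of the upstream
+        docstring):
+
+        .. code-block:: python
+
+            import urllib3
+
+            http = urllib3.PoolManager()
+            pool = http.connection_from_url("https://example.com/a")
+            # Same scheme, host, and port resolve to the same cached pool.
+            assert pool is http.connection_from_url("https://example.com/b")
+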
Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. + """ + u = parse_url(url) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) + + def _merge_pool_kwargs( + self, override: dict[str, typing.Any] | None + ) -> dict[str, typing.Any]: + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool: + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + """ + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + + if u.scheme is None: + warnings.warn( + "URLs without a scheme (ie 'https://') are deprecated and will raise an error " + "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs " + "start with 'https://' or 'http://'. Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2920", + category=DeprecationWarning, + stacklevel=2, + ) + + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw["assert_same_host"] = False + kw["redirect"] = False + + if "headers" not in kw: + kw["headers"] = self.headers + + if self._proxy_requires_url_absolute_form(u): + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. + method = "GET" + # And lose the body not to transfer anything sensitive. + kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() + + retries = kw.get("retries") + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. 
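+        # Editorial note, not upstream code: Retry's default
+        # remove_headers_on_redirect includes "Authorization", so e.g.
+        #
+        #     http.request("GET", "https://a.example/",
+        #                  headers={"Authorization": "Bearer t"}, redirect=True)
+        #
+        # drops the Authorization header before following a redirect to a
+        # different host, which is what the block below implements.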
+ if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + new_headers = kw["headers"].copy() + for header in kw["headers"]: + if header.lower() in retries.remove_headers_on_redirect: + new_headers.pop(header, None) + kw["headers"] = new_headers + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + kw["retries"] = retries + kw["redirect"] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. + + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. + + :param proxy_assert_hostname: + The hostname of the certificate to verify against. + + :param proxy_assert_fingerprint: + The fingerprint of the certificate to verify against. + + Example: + + .. 
code-block:: python + + import urllib3 + + proxy = urllib3.ProxyManager("https://localhost:3128/") + + resp1 = proxy.request("GET", "https://google.com/") + resp2 = proxy.request("GET", "https://httpbin.org/") + + print(len(proxy.pools)) + # 1 + + resp3 = proxy.request("GET", "https://httpbin.org/") + resp4 = proxy.request("GET", "https://twitter.com/") + + print(len(proxy.pools)) + # 3 + + """ + + def __init__( + self, + proxy_url: str, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + proxy_headers: typing.Mapping[str, str] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + use_forwarding_for_https: bool = False, + proxy_assert_hostname: None | str | Literal[False] = None, + proxy_assert_fingerprint: str | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + if isinstance(proxy_url, HTTPConnectionPool): + str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}" + else: + str_proxy_url = proxy_url + proxy = parse_url(str_proxy_url) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig( + proxy_ssl_context, + use_forwarding_for_https, + proxy_assert_hostname, + proxy_assert_fingerprint, + ) + + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config + + super().__init__(num_pools, headers, **connection_pool_kw) + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + if scheme == "https": + return super().connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs + ) + + return super().connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr] + ) + + def _set_proxy_headers( + self, url: str, headers: typing.Mapping[str, str] | None = None + ) -> typing.Mapping[str, str]: + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {"Accept": "*/*"} + + netloc = parse_url(url).netloc + if netloc: + headers_["Host"] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. 
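+            # Editorial note, not upstream code ("proxy.internal" is a
+            # placeholder host for illustration):
+            #
+            #     pm = urllib3.ProxyManager("http://proxy.internal:3128")
+            #     pm.request("GET", "http://example.com/")   # forwarded; Host set below
+            #     pm.request("GET", "https://example.com/")  # CONNECT tunnel; skips this branch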
+ headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) + + return super().urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager: + return ProxyManager(proxy_url=url, **kw) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/py.typed b/env-llmeval/lib/python3.10/site-packages/urllib3/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..5f3ea3d919363f08ab03edbc85b6099bc4df5647 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/py.typed @@ -0,0 +1,2 @@ +# Instruct type checkers to look for inline type annotations in this package. +# See PEP 561. diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/response.py b/env-llmeval/lib/python3.10/site-packages/urllib3/response.py new file mode 100644 index 0000000000000000000000000000000000000000..d31fac9ba0c3ce341c258b711c4c347a760718b8 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/response.py @@ -0,0 +1,1243 @@ +from __future__ import annotations + +import collections +import io +import json as _json +import logging +import re +import sys +import typing +import warnings +import zlib +from contextlib import contextmanager +from http.client import HTTPMessage as _HttplibHTTPMessage +from http.client import HTTPResponse as _HttplibHTTPResponse +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from ._base_connection import BaseHTTPConnection + +try: + try: + import brotlicffi as brotli # type: ignore[import-not-found] + except ImportError: + import brotli # type: ignore[import-not-found] +except ImportError: + brotli = None + +try: + import zstandard as zstd # type: ignore[import-not-found] + + # The package 'zstandard' added the 'eof' property starting + # in v0.18.0 which we require to ensure a complete and + # valid zstd stream was fed into the ZstdDecoder. + # See: https://github.com/urllib3/urllib3/pull/2624 + _zstd_version = _zstd_version = tuple( + map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] + ) + if _zstd_version < (0, 18): # Defensive: + zstd = None + +except (AttributeError, ImportError, ValueError): # Defensive: + zstd = None + +from . 
import util +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPConnection, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .util.response import is_fp_closed, is_response_to_head +from .util.retry import Retry + +if typing.TYPE_CHECKING: + from typing import Literal + + from .connectionpool import HTTPConnectionPool + +log = logging.getLogger(__name__) + + +class ContentDecoder: + def decompress(self, data: bytes) -> bytes: + raise NotImplementedError() + + def flush(self) -> bytes: + raise NotImplementedError() + + +class DeflateDecoder(ContentDecoder): + def __init__(self) -> None: + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None # type: ignore[assignment] + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None # type: ignore[assignment] + + def flush(self) -> bytes: + return self._obj.flush() + + +class GzipDecoderState: + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def decompress(self, data: bytes) -> bytes: + ret = bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + def flush(self) -> bytes: + return self._obj.flush() + + +if brotli is not None: + + class BrotliDecoder(ContentDecoder): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. 
The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + setattr(self, "decompress", self._obj.decompress) + else: + setattr(self, "decompress", self._obj.process) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return self._obj.flush() # type: ignore[no-any-return] + return b"" + + +if zstd is not None: + + class ZstdDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zstd.ZstdDecompressor().decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return b"" + data_parts = [self._obj.decompress(data)] + while self._obj.eof and self._obj.unused_data: + unused_data = self._obj.unused_data + self._obj = zstd.ZstdDecompressor().decompressobj() + data_parts.append(self._obj.decompress(unused_data)) + return b"".join(data_parts) + + def flush(self) -> bytes: + ret = self._obj.flush() # note: this is a no-op + if not self._obj.eof: + raise DecodeError("Zstandard data is incomplete") + return ret # type: ignore[no-any-return] + + +class MultiDecoder(ContentDecoder): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. + """ + + def __init__(self, modes: str) -> None: + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self) -> bytes: + return self._decoders[0].flush() + + def decompress(self, data: bytes) -> bytes: + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode: str) -> ContentDecoder: + if "," in mode: + return MultiDecoder(mode) + + # According to RFC 9110 section 8.4.1.3, recipients should + # consider x-gzip equivalent to gzip + if mode in ("gzip", "x-gzip"): + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + if zstd is not None and mode == "zstd": + return ZstdDecoder() + + return DeflateDecoder() + + +class BytesQueueBuffer: + """Memory-efficient bytes buffer + + To return decoded data in read() and still follow the BufferedIOBase API, we need a + buffer to always return the correct amount of bytes. + + This buffer should be filled using calls to put() + + Our maximum memory usage is determined by the sum of the size of: + + * self.buffer, which contains the full data + * the largest chunk that we will copy in get() + + The worst case scenario is a single chunk, in which case we'll make a full copy of + the data inside get(). 
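+    A short editorial example (not part of the upstream docstring):
+
+    .. code-block:: python
+
+        buf = BytesQueueBuffer()
+        buf.put(b"hello ")
+        buf.put(b"world")
+        assert len(buf) == 11
+        assert buf.get(5) == b"hello"  # splits the first chunk
+        assert buf.get_all() == b" world"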
+ """ + + def __init__(self) -> None: + self.buffer: typing.Deque[bytes] = collections.deque() + self._size: int = 0 + + def __len__(self) -> int: + return self._size + + def put(self, data: bytes) -> None: + self.buffer.append(data) + self._size += len(data) + + def get(self, n: int) -> bytes: + if n == 0: + return b"" + elif not self.buffer: + raise RuntimeError("buffer is empty") + elif n < 0: + raise ValueError("n should be > 0") + + fetched = 0 + ret = io.BytesIO() + while fetched < n: + remaining = n - fetched + chunk = self.buffer.popleft() + chunk_length = len(chunk) + if remaining < chunk_length: + left_chunk, right_chunk = chunk[:remaining], chunk[remaining:] + ret.write(left_chunk) + self.buffer.appendleft(right_chunk) + self._size -= remaining + break + else: + ret.write(chunk) + self._size -= chunk_length + fetched += chunk_length + + if not self.buffer: + break + + return ret.getvalue() + + def get_all(self) -> bytes: + buffer = self.buffer + if not buffer: + assert self._size == 0 + return b"" + if len(buffer) == 1: + result = buffer.pop() + else: + ret = io.BytesIO() + ret.writelines(buffer.popleft() for _ in range(len(buffer))) + result = ret.getvalue() + self._size = 0 + return result + + +class BaseHTTPResponse(io.IOBase): + CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] + if zstd is not None: + CONTENT_DECODERS += ["zstd"] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + + if zstd is not None: + DECODER_ERROR_CLASSES += (zstd.ZstdError,) + + def __init__( + self, + *, + headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, + status: int, + version: int, + reason: str | None, + decode_content: bool, + request_url: str | None, + retries: Retry | None = None, + ) -> None: + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type] + self.status = status + self.version = version + self.reason = reason + self.decode_content = decode_content + self._has_decoded_content = False + self._request_url: str | None = request_url + self.retries = retries + + self.chunked = False + tr_enc = self.headers.get("transfer-encoding", "").lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + self._decoder: ContentDecoder | None = None + self.length_remaining: int | None + + def get_redirect_location(self) -> str | None | Literal[False]: + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get("location") + return False + + @property + def data(self) -> bytes: + raise NotImplementedError() + + def json(self) -> typing.Any: + """ + Parses the body of the HTTP response as JSON. + + To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. + + This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. + + Read more :ref:`here `. 
+ """ + data = self.data.decode("utf-8") + return _json.loads(data) + + @property + def url(self) -> str | None: + raise NotImplementedError() + + @url.setter + def url(self, url: str | None) -> None: + raise NotImplementedError() + + @property + def connection(self) -> BaseHTTPConnection | None: + raise NotImplementedError() + + @property + def retries(self) -> Retry | None: + return self._retries + + @retries.setter + def retries(self, retries: Retry | None) -> None: + # Override the request_url if retries has a redirect location. + if retries is not None and retries.history: + self.url = retries.history[-1].redirect_location + self._retries = retries + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + raise NotImplementedError() + + def read1( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> bytes: + raise NotImplementedError() + + def read_chunked( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def release_conn(self) -> None: + raise NotImplementedError() + + def drain_conn(self) -> None: + raise NotImplementedError() + + def close(self) -> None: + raise NotImplementedError() + + def _init_decoder(self) -> None: + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if encodings: + self._decoder = _get_decoder(content_encoding) + + def _decode( + self, data: bytes, decode_content: bool | None, flush_decoder: bool + ) -> bytes: + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." + ) + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + self._has_decoded_content = True + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) from e + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self) -> bytes: + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + return self._decoder.decompress(b"") + self._decoder.flush() + return b"" + + # Compatibility methods for `io` module + def readinto(self, b: bytearray) -> int: + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[: len(temp)] = temp + return len(temp) + + # Compatibility methods for http.client.HTTPResponse + def getheaders(self) -> HTTPHeaderDict: + warnings.warn( + "HTTPResponse.getheaders() is deprecated and will be removed " + "in urllib3 v2.1.0. 
Instead access HTTPResponse.headers directly.", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers + + def getheader(self, name: str, default: str | None = None) -> str | None: + warnings.warn( + "HTTPResponse.getheader() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers.get(name, default) + + # Compatibility method for http.cookiejar + def info(self) -> HTTPHeaderDict: + return self.headers + + def geturl(self) -> str | None: + return self.url + + +class HTTPResponse(BaseHTTPResponse): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + + def __init__( + self, + body: _TYPE_BODY = "", + headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, + status: int = 0, + version: int = 0, + reason: str | None = None, + preload_content: bool = True, + decode_content: bool = True, + original_response: _HttplibHTTPResponse | None = None, + pool: HTTPConnectionPool | None = None, + connection: HTTPConnection | None = None, + msg: _HttplibHTTPMessage | None = None, + retries: Retry | None = None, + enforce_content_length: bool = True, + request_method: str | None = None, + request_url: str | None = None, + auto_close: bool = True, + ) -> None: + super().__init__( + headers=headers, + status=status, + version=version, + reason=reason, + decode_content=decode_content, + request_url=request_url, + retries=retries, + ) + + self.enforce_content_length = enforce_content_length + self.auto_close = auto_close + + self._body = None + self._fp: _HttplibHTTPResponse | None = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + + if body and isinstance(body, (str, bytes)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, "read"): + self._fp = body # type: ignore[assignment] + + # Are we using the chunked-style of transfer encoding? + self.chunk_left: int | None = None + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # Used to return the correct amount of bytes for partial read()s + self._decoded_buffer = BytesQueueBuffer() + + # If requested, preload the body. 
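+        # Editorial note, not upstream code: with the default
+        # preload_content=True the body is read eagerly here and is then
+        # available via resp.data; preload_content=False defers all reads
+        # to later read()/stream() calls.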
+ if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def release_conn(self) -> None: + if not self._pool or not self._connection: + return None + + self._pool._put_conn(self._connection) + self._connection = None + + def drain_conn(self) -> None: + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, OSError, BaseSSLError, HTTPException): + pass + + @property + def data(self) -> bytes: + # For backwards-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body # type: ignore[return-value] + + if self._fp: + return self.read(cache_content=True) + + return None # type: ignore[return-value] + + @property + def connection(self) -> HTTPConnection | None: + return self._connection + + def isclosed(self) -> bool: + return is_fp_closed(self._fp) + + def tell(self) -> int: + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` + if bytes are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method: str | None) -> int | None: + """ + Set initial length value for Response content if available. + """ + length: int | None + content_length: str | None = self.headers.get("content-length") + + if content_length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. This method falls back to attempt reading + # the response before raising an exception. + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = {int(val) for val in content_length.split(",")} + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % content_length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + else: # if content_length is None + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": + length = 0 + + return length + + @contextmanager + def _error_catcher(self) -> typing.Generator[None, None, None]: + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout as e: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. 
+ raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) from e + + raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except IncompleteRead as e: + if ( + e.expected is not None + and e.partial is not None + and e.expected == -e.partial + ): + arg = "Response may not contain content." + else: + arg = f"Connection broken: {e!r}" + raise ProtocolError(arg, e) from e + + except (HTTPException, OSError) as e: + raise ProtocolError(f"Connection broken: {e!r}", e) from e + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def _fp_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Read a response with the thought that reading the number of bytes + larger than can fit in a 32-bit int at a time via SSL in some + known cases leads to an overflow error that has to be prevented + if `amt` or `self.length_remaining` indicate that a problem may + happen. + + The known cases: + * 3.8 <= CPython < 3.9.7 because of a bug + https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. + * urllib3 injected with pyOpenSSL-backed SSL-support. + * CPython < 3.10 only when `amt` does not fit 32-bit int. + """ + assert self._fp + c_int_max = 2**31 - 1 + if ( + (amt and amt > c_int_max) + or ( + amt is None + and self.length_remaining + and self.length_remaining > c_int_max + ) + ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)): + if read1: + return self._fp.read1(c_int_max) + buffer = io.BytesIO() + # Besides `max_chunk_amt` being a maximum chunk size, it + # affects memory overhead of reading a response by this + # method in CPython. + # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum + # chunk size that does not lead to an overflow error, but + # 256 MiB is a compromise. + max_chunk_amt = 2**28 + while amt is None or amt != 0: + if amt is not None: + chunk_amt = min(amt, max_chunk_amt) + amt -= chunk_amt + else: + chunk_amt = max_chunk_amt + data = self._fp.read(chunk_amt) + if not data: + break + buffer.write(data) + del data # to reduce peak memory usage by `max_chunk_amt`. + return buffer.getvalue() + elif read1: + return self._fp.read1(amt) if amt is not None else self._fp.read1() + else: + # StringIO doesn't like amt=None + return self._fp.read(amt) if amt is not None else self._fp.read() + + def _raw_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Reads `amt` of bytes from the socket. 
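+
+        (Editorial note, not part of the upstream docstring: with
+        ``enforce_content_length`` set, a short body raises IncompleteRead,
+        e.g. a response advertising ``Content-Length: 10`` whose socket
+        yields only 5 bytes before EOF.)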
+ """ + if self._fp is None: + return None # type: ignore[return-value] + + fp_closed = getattr(self._fp, "closed", False) + + with self._error_catcher(): + data = self._fp_read(amt, read1=read1) if not fp_closed else b"" + if amt is not None and amt != 0 and not data: + # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + if ( + self.enforce_content_length + and self.length_remaining is not None + and self.length_remaining != 0 + ): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. + raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + elif read1 and ( + (amt != 0 and not data) or self.length_remaining == len(data) + ): + # All data has been read, but `self._fp.read1` in + # CPython 3.12 and older doesn't always close + # `http.client.HTTPResponse`, so we close it here. + # See https://github.com/python/cpython/issues/113199 + self._fp.close() + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + return data + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + """ + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if amt is not None: + cache_content = False + + if len(self._decoded_buffer) >= amt: + return self._decoded_buffer.get(amt) + + data = self._raw_read(amt) + + flush_decoder = amt is None or (amt != 0 and not data) + + if not data and len(self._decoded_buffer) == 0: + return data + + if amt is None: + data = self._decode(data, decode_content, flush_decoder) + if cache_content: + self._body = data + else: + # do not waste memory on buffer when not decoding + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." 
+ ) + return data + + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + + while len(self._decoded_buffer) < amt and data: + # TODO make sure to initially read enough data to get past the headers + # For example, the GZ file header takes 10 bytes, we don't want to read + # it one byte at a time + data = self._raw_read(amt) + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + data = self._decoded_buffer.get(amt) + + return data + + def read1( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> bytes: + """ + Similar to ``http.client.HTTPResponse.read1`` and documented + in :meth:`io.BufferedReader.read1`, but with an additional parameter: + ``decode_content``. + + :param amt: + How much of the content to read. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if decode_content is None: + decode_content = self.decode_content + # try and respond without going to the network + if self._has_decoded_content: + if not decode_content: + raise RuntimeError( + "Calling read1(decode_content=False) is not supported after " + "read1(decode_content=True) was called." + ) + if len(self._decoded_buffer) > 0: + if amt is None: + return self._decoded_buffer.get_all() + return self._decoded_buffer.get(amt) + if amt == 0: + return b"" + + # FIXME, this method's type doesn't say returning None is possible + data = self._raw_read(amt, read1=True) + if not decode_content or data is None: + return data + + self._init_decoder() + while True: + flush_decoder = not data + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + if decoded_data or flush_decoder: + break + data = self._raw_read(8192, read1=True) + + if amt is None: + return self._decoded_buffer.get_all() + return self._decoded_buffer.get(amt) + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Generator[bytes, None, None]: + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. 
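+        Example (editorial, not part of the upstream docstring; assumes an
+        existing PoolManager ``http`` and a ``url`` string):
+
+        .. code-block:: python
+
+            resp = http.request("GET", url, preload_content=False)
+            with open("out.bin", "wb") as fh:
+                for chunk in resp.stream(2**16):
+                    fh.write(chunk)
+            resp.release_conn()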
+ """ + if self.chunked and self.supports_chunked_reads(): + yield from self.read_chunked(amt, decode_content=decode_content) + else: + while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0: + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + # Overrides from io.IOBase + def readable(self) -> bool: + return True + + def close(self) -> None: + if not self.closed and self._fp: + self._fp.close() + + if self._connection: + self._connection.close() + + if not self.auto_close: + io.IOBase.close(self) + + @property + def closed(self) -> bool: + if not self.auto_close: + return io.IOBase.closed.__get__(self) # type: ignore[no-any-return] + elif self._fp is None: + return True + elif hasattr(self._fp, "isclosed"): + return self._fp.isclosed() + elif hasattr(self._fp, "closed"): + return self._fp.closed + else: + return True + + def fileno(self) -> int: + if self._fp is None: + raise OSError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise OSError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) + + def flush(self) -> None: + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): + return self._fp.flush() + + def supports_chunked_reads(self) -> bool: + """ + Checks if the underlying file-like object looks like a + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). + """ + return hasattr(self._fp, "fp") + + def _update_chunk_length(self) -> None: + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return None + line = self._fp.fp.readline() # type: ignore[union-attr] + line = line.split(b";", 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + self.close() + if line: + # Invalid chunked protocol response, abort. + raise InvalidChunkLength(self, line) from None + else: + # Truncated at start of next chunk + raise ProtocolError("Response ended prematurely") from None + + def _handle_chunk(self, amt: int | None) -> bytes: + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + returned_chunk = chunk + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif self.chunk_left is not None and amt < self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk # type: ignore[no-any-return] + + def read_chunked( + self, amt: int | None = None, decode_content: bool | None = None + ) -> typing.Generator[bytes, None, None]: + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. 
If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing." + ) + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be http.client.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks." + ) + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return None + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: # type: ignore[union-attr] + return None + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while self._fp is not None: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b"\r\n": + break + + # We read everything; close the "file". + if self._original_response: + self._original_response.close() + + @property + def url(self) -> str | None: + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + return self._request_url + + @url.setter + def url(self, url: str) -> None: + self._request_url = url + + def __iter__(self) -> typing.Iterator[bytes]: + buffer: list[bytes] = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunks = chunk.split(b"\n") + yield b"".join(buffer) + chunks[0] + b"\n" + for x in chunks[1:-1]: + yield x + b"\n" + if chunks[-1]: + buffer = [chunks[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__init__.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..534126033c083203649022fa9b753a433f005556 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__init__.py @@ -0,0 +1,42 @@ +# For backwards compatibility, provide imports that used to be here. 
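+# Editorial note, not part of the upstream file: these re-exports keep
+# older import paths working, e.g.
+#
+#     from urllib3.util import Retry, Timeout, parse_url
+#
+#     retry = Retry(total=3, redirect=2)
+#     timeout = Timeout(connect=2.0, read=7.0)
+#     assert parse_url("https://example.com:8443/p").port == 8443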
+from __future__ import annotations + +from .connection import is_connection_dropped +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers +from .response import is_fp_closed +from .retry import Retry +from .ssl_ import ( + ALPN_PROTOCOLS, + IS_PYOPENSSL, + SSLContext, + assert_fingerprint, + create_urllib3_context, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import Timeout +from .url import Url, parse_url +from .wait import wait_for_read, wait_for_write + +__all__ = ( + "IS_PYOPENSSL", + "SSLContext", + "ALPN_PROTOCOLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "create_urllib3_context", + "is_connection_dropped", + "is_fp_closed", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", +) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/proxy.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/proxy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..15397b475e2dba2784ddd987824dff40f80e7848 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/proxy.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/response.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/response.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d858a869123e82cf6bf6caa9db948e0ff9e015ac Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/response.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29b5372099dca3dde80c44135c0a14ffd925ee39 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/url.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/url.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2efe06bf9c1476560ad5790c4aeeabb172f2e71f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/url.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/wait.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/wait.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43c5783b2b5c55738f37bdd65bd970412f910fb6 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/urllib3/util/__pycache__/wait.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/request.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/request.py new file mode 100644 index 0000000000000000000000000000000000000000..fe0e3485e8a886f824b2e3e21458c80f7017c25e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/request.py @@ -0,0 +1,256 @@ +from __future__ import annotations + +import io +import typing +from base64 import b64encode +from enum import Enum + +from ..exceptions 
import UnrewindableBodyError +from .util import to_bytes + +if typing.TYPE_CHECKING: + from typing import Final + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. +SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) + +ACCEPT_ENCODING = "gzip,deflate" +try: + try: + import brotlicffi as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" +try: + import zstandard as _unused_module_zstd # type: ignore[import-not-found] # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",zstd" + + +class _TYPE_FAILEDTELL(Enum): + token = 0 + + +_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token + +_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL] + +# When sending a request with these methods we aren't expecting +# a body so don't need to set an explicit 'Content-Length: 0' +# The reason we do this in the negative instead of tracking methods +# which 'should' have a body is because unknown methods should be +# treated as if they were 'POST' which *does* expect a body. +_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"} + + +def make_headers( + keep_alive: bool | None = None, + accept_encoding: bool | list[str] | str | None = None, + user_agent: str | None = None, + basic_auth: str | None = None, + proxy_basic_auth: str | None = None, + disable_cache: bool | None = None, +) -> dict[str, str]: + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. If either the ``brotli`` or + ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example: + + .. 
code-block:: python + + import urllib3 + + print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0")) + # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + print(urllib3.util.make_headers(accept_encoding=True)) + # {'accept-encoding': 'gzip,deflate'} + """ + headers: dict[str, str] = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ",".join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers["accept-encoding"] = accept_encoding + + if user_agent: + headers["user-agent"] = user_agent + + if keep_alive: + headers["connection"] = "keep-alive" + + if basic_auth: + headers[ + "authorization" + ] = f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}" + + if proxy_basic_auth: + headers[ + "proxy-authorization" + ] = f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}" + + if disable_cache: + headers["cache-control"] = "no-cache" + + return headers + + +def set_file_position( + body: typing.Any, pos: _TYPE_BODY_POSITION | None +) -> _TYPE_BODY_POSITION | None: + """ + If a position is provided, move the file to that point. + Otherwise, we'll attempt to record a position for future use. + """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, "tell", None) is not None: + try: + pos = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None: + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int body_pos: + Position to seek to in file. + """ + body_seek = getattr(body, "seek", None) + if body_seek is not None and isinstance(body_pos, int): + try: + body_seek(body_pos) + except OSError as e: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." + ) from e + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) + else: + raise ValueError( + f"body_pos must be of type integer, instead it was {type(body_pos)}." + ) + + +class ChunksAndContentLength(typing.NamedTuple): + chunks: typing.Iterable[bytes] | None + content_length: int | None + + +def body_to_chunks( + body: typing.Any | None, method: str, blocksize: int +) -> ChunksAndContentLength: + """Takes the HTTP request method, body, and blocksize and + transforms them into an iterable of chunks to pass to + socket.sendall() and an optional 'Content-Length' header. + + A 'Content-Length' of 'None' indicates the length of the body + can't be determined, so 'Transfer-Encoding: chunked' should be + used for framing instead. + """ + + chunks: typing.Iterable[bytes] | None + content_length: int | None + + # No body, we need to make a recommendation on 'Content-Length' + # based on whether that request method is expected to have + # a body or not. + if body is None: + chunks = None + if method.upper() not in _METHODS_NOT_EXPECTING_BODY: + content_length = 0 + else: + content_length = None + + # Bytes or strings become bytes + elif isinstance(body, (str, bytes)): + chunks = (to_bytes(body),) + content_length = len(chunks[0]) + + # File-like object, TODO: use seek() and tell() for length?
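# A minimal sketch of how set_file_position() and rewind_body() above are
# meant to cooperate across a retry; the BytesIO body is illustrative only.
import io

body = io.BytesIO(b"payload")
pos = set_file_position(body, None)  # records body.tell() before the first send
body.read()                          # the first attempt consumes the body
rewind_body(body, pos)               # a redirect/retry rewinds to the recorded mark
assert body.read() == b"payload"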
+ elif hasattr(body, "read"): + + def chunk_readable() -> typing.Iterable[bytes]: + nonlocal body, blocksize + encode = isinstance(body, io.TextIOBase) + while True: + datablock = body.read(blocksize) + if not datablock: + break + if encode: + datablock = datablock.encode("iso-8859-1") + yield datablock + + chunks = chunk_readable() + content_length = None + + # Otherwise we need to start checking via duck-typing. + else: + try: + # Check if the body implements the buffer API. + mv = memoryview(body) + except TypeError: + try: + # Check if the body is an iterable + chunks = iter(body) + content_length = None + except TypeError: + raise TypeError( + f"'body' must be a bytes-like object, file-like " + f"object, or iterable. Instead was {body!r}" + ) from None + else: + # Since it implements the buffer API, it can be passed directly to socket.sendall() + chunks = (body,) + content_length = mv.nbytes + + return ChunksAndContentLength(chunks=chunks, content_length=content_length) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/response.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/response.py new file mode 100644 index 0000000000000000000000000000000000000000..0f4578696fa2e17a900c6890ec26d65e860b0b72 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/response.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import http.client as httplib +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect + +from ..exceptions import HeaderParsingError + + +def is_fp_closed(obj: object) -> bool: + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() # type: ignore[no-any-return, attr-defined] + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed # type: ignore[no-any-return, attr-defined] + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None # type: ignore[attr-defined] + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers: httplib.HTTPMessage) -> None: + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param http.client.HTTPMessage headers: Headers to verify. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier, add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError(f"expected httplib.HTTPMessage, got {type(headers)}.") + + unparsed_data = None + + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = headers.get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload + + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers(). This results in + # defects on multipart responses in particular.
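# A quick sketch of the probing order in is_fp_closed() above: isclosed(),
# then .closed, then .fp. A plain BytesIO exposes only `closed`, so the
# second probe answers; values are illustrative only.
import io

fp = io.BytesIO(b"x")
assert is_fp_closed(fp) is False
fp.close()
assert is_fp_closed(fp) is True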
+ # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in headers.defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response: httplib.HTTPResponse) -> bool: + """ + Checks whether the request of a response has been a HEAD-request. + + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method_str = response._method # type: str # type: ignore[attr-defined] + return method_str.upper() == "HEAD" diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/retry.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/retry.py new file mode 100644 index 0000000000000000000000000000000000000000..7572bfd26ad87711d67c3418a6a0ac9921fed08c --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/retry.py @@ -0,0 +1,529 @@ +from __future__ import annotations + +import email +import logging +import random +import re +import time +import typing +from itertools import takewhile +from types import TracebackType + +from ..exceptions import ( + ConnectTimeoutError, + InvalidHeader, + MaxRetryError, + ProtocolError, + ProxyError, + ReadTimeoutError, + ResponseError, +) +from .util import reraise + +if typing.TYPE_CHECKING: + from ..connectionpool import ConnectionPool + from ..response import BaseHTTPResponse + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +class RequestHistory(typing.NamedTuple): + method: str | None + url: str | None + error: Exception | None + status: int | None + redirect_location: str | None + + +class Retry: + """Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool: + + .. code-block:: python + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request("GET", "https://example.com/") + + Or per-request (which overrides the default for the pool): + + .. code-block:: python + + response = http.request("GET", "https://example.com/", retries=Retry(10)) + + Retries can be disabled by passing ``False``: + + .. code-block:: python + + response = http.request("GET", "https://example.com/", retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. 
+ + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. + These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param Collection allowed_methods: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. + + Set to a ``None`` value to retry on any verb. + + :param Collection status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``allowed_methods`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ** ({number of previous retries})) + + seconds. If `backoff_jitter` is non-zero, this sleep is extended by:: + + random.uniform(0, {backoff jitter}) + + seconds. For example, if the backoff_factor is 0.1, then :func:`Retry.sleep` will + sleep for [0.0s, 0.2s, 0.4s, 0.8s, ...] between retries. No backoff will ever + be longer than `backoff_max`. + + By default, backoff is disabled (factor set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + + :param Collection remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. 
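    One more illustrative combination of these parameters (values are
    examples only): retry idempotent requests on 502/503 with backoff and
    strip ``Authorization`` before following redirects:

    .. code-block:: python

        retries = Retry(
            total=5,
            backoff_factor=0.5,
            status_forcelist=[502, 503],
            remove_headers_on_redirect=["Authorization"],
        )
        http = urllib3.PoolManager(retries=retries)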
+ """ + + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) + + #: Default status codes to be used for ``status_forcelist`` + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) + + #: Default maximum backoff time. + DEFAULT_BACKOFF_MAX = 120 + + # Backward compatibility; assigned outside of the class. + DEFAULT: typing.ClassVar[Retry] + + def __init__( + self, + total: bool | int | None = 10, + connect: int | None = None, + read: int | None = None, + redirect: bool | int | None = None, + status: int | None = None, + other: int | None = None, + allowed_methods: typing.Collection[str] | None = DEFAULT_ALLOWED_METHODS, + status_forcelist: typing.Collection[int] | None = None, + backoff_factor: float = 0, + backoff_max: float = DEFAULT_BACKOFF_MAX, + raise_on_redirect: bool = True, + raise_on_status: bool = True, + history: tuple[RequestHistory, ...] | None = None, + respect_retry_after_header: bool = True, + remove_headers_on_redirect: typing.Collection[ + str + ] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT, + backoff_jitter: float = 0.0, + ) -> None: + self.total = total + self.connect = connect + self.read = read + self.status = status + self.other = other + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.allowed_methods = allowed_methods + self.backoff_factor = backoff_factor + self.backoff_max = backoff_max + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or () + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = frozenset( + h.lower() for h in remove_headers_on_redirect + ) + self.backoff_jitter = backoff_jitter + + def new(self, **kw: typing.Any) -> Retry: + params = dict( + total=self.total, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, + other=self.other, + allowed_methods=self.allowed_methods, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + backoff_max=self.backoff_max, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, + backoff_jitter=self.backoff_jitter, + ) + + params.update(kw) + return type(self)(**params) # type: ignore[arg-type] + + @classmethod + def from_int( + cls, + retries: Retry | bool | int | None, + redirect: bool | int | None = True, + default: Retry | bool | int | None = None, + ) -> Retry: + """Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self) -> float: + """Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). 
+ consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + if self.backoff_jitter != 0.0: + backoff_value += random.random() * self.backoff_jitter + return float(max(0, min(self.backoff_max, backoff_value))) + + def parse_retry_after(self, retry_after: str) -> float: + seconds: float + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + raise InvalidHeader(f"Invalid Retry-After header: {retry_after}") + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + seconds = max(seconds, 0) + + return seconds + + def get_retry_after(self, response: BaseHTTPResponse) -> float | None: + """Get the value of Retry-After in seconds.""" + + retry_after = response.headers.get("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response: BaseHTTPResponse) -> bool: + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self) -> None: + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response: BaseHTTPResponse | None = None) -> None: + """Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if self.respect_retry_after_header and response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err: Exception) -> bool: + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + if isinstance(err, ProxyError): + err = err.original_error + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err: Exception) -> bool: + """Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method: str) -> bool: + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods + """ + if self.allowed_methods and method.upper() not in self.allowed_methods: + return False + return True + + def is_retry( + self, method: str, status_code: int, has_retry_after: bool = False + ) -> bool: + """Is this method/status code retryable? 
(Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return bool( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) + + def is_exhausted(self) -> bool: + """Are we out of retries?""" + retry_counts = [ + x + for x in ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) + if x + ] + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment( + self, + method: str | None = None, + url: str | None = None, + response: BaseHTTPResponse | None = None, + error: Exception | None = None, + _pool: ConnectionPool | None = None, + _stacktrace: TracebackType | None = None, + ) -> Retry: + """Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.BaseHTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + other = self.other + cause = "unknown" + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or method is None or not self._is_method_retryable(method): + raise reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif error: + # Other retry? + if other is not None: + other -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
+ if redirect is not None: + redirect -= 1 + cause = "too many redirects" + response_redirect_location = response.get_redirect_location() + if response_redirect_location: + redirect_location = response_redirect_location + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and the given method is in the allowed_methods + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) + status = response.status + + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) + + new_retry = self.new( + total=total, + connect=connect, + read=read, + redirect=redirect, + status=status_count, + other=other, + history=history, + ) + + if new_retry.is_exhausted(): + reason = error or ResponseError(cause) + raise MaxRetryError(_pool, url, reason) from reason # type: ignore[arg-type] + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self) -> str: + return ( + f"{type(self).__name__}(total={self.total}, connect={self.connect}, " + f"read={self.read}, redirect={self.redirect}, status={self.status})" + ) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_.py new file mode 100644 index 0000000000000000000000000000000000000000..b14cf27b6141371b4232abdb64a45d9b68c5fc8f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_.py @@ -0,0 +1,509 @@ +from __future__ import annotations + +import hmac +import os +import socket +import sys +import typing +import warnings +from binascii import unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import ProxySchemeUnsupported, SSLError +from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE + +SSLContext = None +SSLTransport = None +HAS_NEVER_CHECK_COMMON_NAME = False +IS_PYOPENSSL = False +ALPN_PROTOCOLS = ["http/1.1"] + +_TYPE_VERSION_INFO = typing.Tuple[int, int, int, str, int] + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} + + +def _is_bpo_43522_fixed( + implementation_name: str, + version_info: _TYPE_VERSION_INFO, + pypy_version_info: _TYPE_VERSION_INFO | None, +) -> bool: + """Return True for CPython 3.8.9+, 3.9.3+ or 3.10+ and PyPy 7.3.8+ where + setting SSLContext.hostname_checks_common_name to False works. + + Outside of CPython and PyPy we don't know which implementations work + or not so we conservatively use our hostname matching as we know that works + on all implementations. 
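# Closing out the Retry implementation above, a minimal sketch of the
# increment() contract: each call returns a fresh Retry, and MaxRetryError
# is raised once a counter drops below zero. The public import paths are
# assumed available:
from urllib3.exceptions import MaxRetryError
from urllib3.util.retry import Retry

r = Retry(total=1)
r = r.increment(method="GET", url="/")  # total: 1 -> 0, returns a new object
try:
    r.increment(method="GET", url="/")  # total: 0 -> -1, now exhausted
except MaxRetryError as e:
    print(e.reason)  # the wrapped error explaining why retrying stopped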
+ + https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963 + https://foss.heptapod.net/pypy/pypy/-/issues/3539 + """ + if implementation_name == "pypy": + # https://foss.heptapod.net/pypy/pypy/-/issues/3129 + return pypy_version_info >= (7, 3, 8) # type: ignore[operator] + elif implementation_name == "cpython": + major_minor = version_info[:2] + micro = version_info[2] + return ( + (major_minor == (3, 8) and micro >= 9) + or (major_minor == (3, 9) and micro >= 3) + or major_minor >= (3, 10) + ) + else: # Defensive: + return False + + +def _is_has_never_check_common_name_reliable( + openssl_version: str, + openssl_version_number: int, + implementation_name: str, + version_info: _TYPE_VERSION_INFO, + pypy_version_info: _TYPE_VERSION_INFO | None, +) -> bool: + # As of May 2023, all released versions of LibreSSL fail to reject certificates with + # only common names, see https://github.com/urllib3/urllib3/pull/3024 + is_openssl = openssl_version.startswith("OpenSSL ") + # Before fixing OpenSSL issue #14579, the SSL_new() API was not copying hostflags + # like X509_CHECK_FLAG_NEVER_CHECK_SUBJECT, which tripped up CPython. + # https://github.com/openssl/openssl/issues/14579 + # This was released in OpenSSL 1.1.1l+ (>=0x101010cf) + is_openssl_issue_14579_fixed = openssl_version_number >= 0x101010CF + + return is_openssl and ( + is_openssl_issue_14579_fixed + or _is_bpo_43522_fixed(implementation_name, version_info, pypy_version_info) + ) + + +if typing.TYPE_CHECKING: + from ssl import VerifyMode + from typing import Literal, TypedDict + + from .ssltransport import SSLTransport as SSLTransportType + + class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): + subjectAltName: tuple[tuple[str, str], ...] + subject: tuple[tuple[tuple[str, str], ...], ...] + serialNumber: str + + +# Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X' +_SSL_VERSION_TO_TLS_VERSION: dict[int, int] = {} + +try: # Do we have ssl at all? + import ssl + from ssl import ( # type: ignore[assignment] + CERT_REQUIRED, + HAS_NEVER_CHECK_COMMON_NAME, + OP_NO_COMPRESSION, + OP_NO_TICKET, + OPENSSL_VERSION, + OPENSSL_VERSION_NUMBER, + PROTOCOL_TLS, + PROTOCOL_TLS_CLIENT, + OP_NO_SSLv2, + OP_NO_SSLv3, + SSLContext, + TLSVersion, + ) + + PROTOCOL_SSLv23 = PROTOCOL_TLS + + # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython + # 3.8.9, 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+ + if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable( + OPENSSL_VERSION, + OPENSSL_VERSION_NUMBER, + sys.implementation.name, + sys.version_info, + sys.pypy_version_info if sys.implementation.name == "pypy" else None, # type: ignore[attr-defined] + ): + HAS_NEVER_CHECK_COMMON_NAME = False + + # Need to be careful here in case old TLS versions get + # removed in future 'ssl' module implementations. 
+ for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"): + try: + _SSL_VERSION_TO_TLS_VERSION[getattr(ssl, f"PROTOCOL_{attr}")] = getattr( + TLSVersion, attr + ) + except AttributeError: # Defensive: + continue + + from .ssltransport import SSLTransport # type: ignore[assignment] +except ImportError: + OP_NO_COMPRESSION = 0x20000 # type: ignore[assignment] + OP_NO_TICKET = 0x4000 # type: ignore[assignment] + OP_NO_SSLv2 = 0x1000000 # type: ignore[assignment] + OP_NO_SSLv3 = 0x2000000 # type: ignore[assignment] + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 # type: ignore[assignment] + PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment] + + +_TYPE_PEER_CERT_RET = typing.Union["_TYPE_PEER_CERT_RET_DICT", bytes, None] + + +def assert_fingerprint(cert: bytes | None, fingerprint: str) -> None: + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + if cert is None: + raise SSLError("No certificate for the peer.") + + fingerprint = fingerprint.replace(":", "").lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError(f"Fingerprint of invalid length: {fingerprint}") + + # We need encode() here for py32; works on py2 and py33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not hmac.compare_digest(cert_digest, fingerprint_bytes): + raise SSLError( + f'Fingerprints did not match. Expected "{fingerprint}", got "{cert_digest.hex()}"' + ) + + +def resolve_cert_reqs(candidate: None | int | str) -> VerifyMode: + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_REQUIRED`. + If given a string, it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.) + If it's neither `None` nor a string, we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_REQUIRED + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "CERT_" + candidate) + return res # type: ignore[no-any-return] + + return candidate # type: ignore[return-value] + + +def resolve_ssl_version(candidate: None | int | str) -> int: + """ + Like resolve_cert_reqs, but for SSL/TLS protocol versions. + """ + if candidate is None: + return PROTOCOL_TLS + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "PROTOCOL_" + candidate) + return typing.cast(int, res) + + return candidate + + +def create_urllib3_context( + ssl_version: int | None = None, + cert_reqs: int | None = None, + options: int | None = None, + ciphers: str | None = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, +) -> ssl.SSLContext: + """Creates and configures an :class:`ssl.SSLContext` instance for use with urllib3. + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + + This parameter is deprecated; instead use 'ssl_minimum_version'. + :param ssl_minimum_version: + The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
+ :param ssl_maximum_version: + The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value. + Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the + default value. + :param cert_reqs: + Whether to require certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. + :param ciphers: + Which cipher suites to allow the server to select. Defaults to either system configured + ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers. + :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + if SSLContext is None: + raise TypeError("Can't create an SSLContext object without an ssl module") + + # This means 'ssl_version' was specified as an exact value. + if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT): + # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version' + # to avoid conflicts. + if ssl_minimum_version is not None or ssl_maximum_version is not None: + raise ValueError( + "Can't specify both 'ssl_version' and either " + "'ssl_minimum_version' or 'ssl_maximum_version'" + ) + + # 'ssl_version' is deprecated and will be removed in the future. + else: + # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead. + ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get( + ssl_version, TLSVersion.MINIMUM_SUPPORTED + ) + ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get( + ssl_version, TLSVersion.MAXIMUM_SUPPORTED + ) + + # This warning message is pushing users to use 'ssl_minimum_version' + # instead of both min/max. Best practice is to only set the minimum version and + # keep the maximum version at its default value: 'TLSVersion.MAXIMUM_SUPPORTED' + warnings.warn( + "'ssl_version' option is deprecated and will be " + "removed in urllib3 v2.1.0. Instead use 'ssl_minimum_version'", + category=DeprecationWarning, + stacklevel=2, + ) + + # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT + context = SSLContext(PROTOCOL_TLS_CLIENT) + + if ssl_minimum_version is not None: + context.minimum_version = ssl_minimum_version + else: # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here + context.minimum_version = TLSVersion.TLSv1_2 + + if ssl_maximum_version is not None: + context.maximum_version = ssl_maximum_version + + # Unless we're given ciphers defer to either system ciphers in + # the case of OpenSSL 1.1.1+ or use our own secure default ciphers. + if ciphers: + context.set_ciphers(ciphers) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on the wire, and although the ticket is encrypted, + # there is a risk associated with it being on the wire, + # if the server is not rotating its ticketing keys properly.
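# A minimal usage sketch for create_urllib3_context(), following the
# docstring's advice to pin only the TLS floor and leave the maximum at its
# default; assumes the stdlib ssl module is available.
import ssl

ctx = create_urllib3_context(ssl_minimum_version=ssl.TLSVersion.TLSv1_2)
# cert_reqs defaults to ssl.CERT_REQUIRED, and hostname checking is enabled
# below for the non-pyOpenSSL path.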
+ options |= OP_NO_TICKET + + context.options |= options + + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist when using + # an SSLContext created by pyOpenSSL. + if getattr(context, "post_handshake_auth", None) is not None: + context.post_handshake_auth = True + + # The order of the below lines setting verify_mode and check_hostname + # matter due to safe-guards SSLContext has to prevent an SSLContext with + # check_hostname=True, verify_mode=NONE/OPTIONAL. + # We always set 'check_hostname=False' for pyOpenSSL so we rely on our own + # 'ssl.match_hostname()' implementation. + if cert_reqs == ssl.CERT_REQUIRED and not IS_PYOPENSSL: + context.verify_mode = cert_reqs + context.check_hostname = True + else: + context.check_hostname = False + context.verify_mode = cert_reqs + + try: + context.hostname_checks_common_name = False + except AttributeError: # Defensive: for CPython < 3.8.9 and 3.9.3; for PyPy < 7.3.8 + pass + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. + if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile + + return context + + +@typing.overload +def ssl_wrap_socket( + sock: socket.socket, + keyfile: str | None = ..., + certfile: str | None = ..., + cert_reqs: int | None = ..., + ca_certs: str | None = ..., + server_hostname: str | None = ..., + ssl_version: int | None = ..., + ciphers: str | None = ..., + ssl_context: ssl.SSLContext | None = ..., + ca_cert_dir: str | None = ..., + key_password: str | None = ..., + ca_cert_data: None | str | bytes = ..., + tls_in_tls: Literal[False] = ..., +) -> ssl.SSLSocket: + ... + + +@typing.overload +def ssl_wrap_socket( + sock: socket.socket, + keyfile: str | None = ..., + certfile: str | None = ..., + cert_reqs: int | None = ..., + ca_certs: str | None = ..., + server_hostname: str | None = ..., + ssl_version: int | None = ..., + ciphers: str | None = ..., + ssl_context: ssl.SSLContext | None = ..., + ca_cert_dir: str | None = ..., + key_password: str | None = ..., + ca_cert_data: None | str | bytes = ..., + tls_in_tls: bool = ..., +) -> ssl.SSLSocket | SSLTransportType: + ... + + +def ssl_wrap_socket( + sock: socket.socket, + keyfile: str | None = None, + certfile: str | None = None, + cert_reqs: int | None = None, + ca_certs: str | None = None, + server_hostname: str | None = None, + ssl_version: int | None = None, + ciphers: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_cert_dir: str | None = None, + key_password: str | None = None, + ca_cert_data: None | str | bytes = None, + tls_in_tls: bool = False, +) -> ssl.SSLSocket | SSLTransportType: + """ + All arguments except for server_hostname, ssl_context, tls_in_tls, ca_cert_data and + ca_cert_dir have the same meaning as they do when using + :func:`ssl.create_default_context`, :meth:`ssl.SSLContext.load_cert_chain`, + :meth:`ssl.SSLContext.set_ciphers` and :meth:`ssl.SSLContext.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. 
+ :param ciphers: + A string of ciphers we wish the client to support. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are only used in tests. + # We should consider deprecating and removing this code. + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) + + if ca_certs or ca_cert_dir or ca_cert_data: + try: + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) + except OSError as e: + raise SSLError(e) from e + + elif ssl_context is None and hasattr(context, "load_default_certs"): + # try to load OS default certs; works well on Windows. + context.load_default_certs() + + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + + if certfile: + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) + + try: + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols + pass + + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname) + return ssl_sock + + +def is_ipaddress(hostname: str | bytes) -> bool: + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if isinstance(hostname, bytes): + # IDN A-label bytes are ASCII compatible. + hostname = hostname.decode("ascii") + return bool(_IPV4_RE.match(hostname) or _BRACELESS_IPV6_ADDRZ_RE.match(hostname)) + + +def _is_key_file_encrypted(key_file: str) -> bool: + """Detects if a key file is encrypted or not.""" + with open(key_file) as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + + return False + + +def _ssl_wrap_socket_impl( + sock: socket.socket, + ssl_context: ssl.SSLContext, + tls_in_tls: bool, + server_hostname: str | None = None, +) -> ssl.SSLSocket | SSLTransportType: + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. 
+ raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_match_hostname.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_match_hostname.py new file mode 100644 index 0000000000000000000000000000000000000000..453cfd420d835be58b5af581c3065e7b37079ecf --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/ssl_match_hostname.py @@ -0,0 +1,159 @@ +"""The match_hostname() function from Python 3.5, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html +# It is modified to remove commonName support. + +from __future__ import annotations + +import ipaddress +import re +import typing +from ipaddress import IPv4Address, IPv6Address + +if typing.TYPE_CHECKING: + from .ssl_ import _TYPE_PEER_CERT_RET_DICT + +__version__ = "3.5.0.1" + + +class CertificateError(ValueError): + pass + + +def _dnsname_match( + dn: typing.Any, hostname: str, max_wildcards: int = 1 +) -> typing.Match[str] | None | bool: + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r".") + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count("*") + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn) + ) + + # speed up common case w/o wildcards + if not wildcards: + return bool(dn.lower() == hostname.lower()) + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == "*": + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) + return pat.match(hostname) + + +def _ipaddress_match(ipname: str, host_ip: IPv4Address | IPv6Address) -> bool: + """Exact matching of IP addresses. + + RFC 9110 section 4.3.5: "A reference identity of IP-ID contains the decoded + bytes of the IP address. An IP version 4 address is 4 octets, and an IP + version 6 address is 16 octets. [...] A reference identity of type IP-ID + matches if the address is identical to an iPAddress value of the + subjectAltName extension of the certificate." 
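# Illustrative cases for the RFC 6125 wildcard rules implemented by
# _dnsname_match() above; the hostnames are examples only.
assert _dnsname_match("*.example.com", "www.example.com")      # single-label wildcard
assert not _dnsname_match("*.example.com", "a.b.example.com")  # '*' never spans a dot
assert _dnsname_match("example.com", "EXAMPLE.COM") is True    # exact match is case-insensitive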
+ """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(ipname.rstrip()) + return bool(ip.packed == host_ip.packed) + + +def match_hostname( + cert: _TYPE_PEER_CERT_RET_DICT | None, + hostname: str, + hostname_checks_common_name: bool = False, +) -> None: + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) + try: + # Divergence from upstream: ipaddress can't handle byte str + # + # The ipaddress module shipped with Python < 3.9 does not support + # scoped IPv6 addresses so we unconditionally strip the Zone IDs for + # now. Once we drop support for Python 3.9 we can remove this branch. + if "%" in hostname: + host_ip = ipaddress.ip_address(hostname[: hostname.rfind("%")]) + else: + host_ip = ipaddress.ip_address(hostname) + + except ValueError: + # Not an IP address (common case) + host_ip = None + dnsnames = [] + san: tuple[tuple[str, str], ...] = cert.get("subjectAltName", ()) + key: str + value: str + for key, value in san: + if key == "DNS": + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == "IP Address": + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + + # We only check 'commonName' if it's enabled and we're not verifying + # an IP address. IP addresses aren't valid within 'commonName'. + if hostname_checks_common_name and host_ip is None and not dnsnames: + for sub in cert.get("subject", ()): + for key, value in sub: + if key == "commonName": + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + + if len(dnsnames) > 1: + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) + elif len(dnsnames) == 1: + raise CertificateError(f"hostname {hostname!r} doesn't match {dnsnames[0]!r}") + else: + raise CertificateError("no appropriate subjectAltName fields were found") diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/url.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/url.py new file mode 100644 index 0000000000000000000000000000000000000000..d53ea932a0309181a4e07596c773f3765eb36977 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/url.py @@ -0,0 +1,471 @@ +from __future__ import annotations + +import re +import typing + +from ..exceptions import LocationParseError +from .util import to_str + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +_NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +_PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +_SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +_URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" + r"([^?#]*)" + r"(?:\?([^#]*))?" 
+ r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +_IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +_HEX_PAT = "[0-9A-Fa-f]{1,4}" +_LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=_HEX_PAT, ipv4=_IPV4_PAT) +_subs = {"hex": _HEX_PAT, "ls32": _LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +_UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" +_IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +_ZONE_ID_PAT = "(?:%25|%)(?:[" + _UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +_IPV6_ADDRZ_PAT = r"\[" + _IPV6_PAT + r"(?:" + _ZONE_ID_PAT + r")?\]" +_REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +_IPV4_RE = re.compile("^" + _IPV4_PAT + "$") +_IPV6_RE = re.compile("^" + _IPV6_PAT + "$") +_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT + "$") +_BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT[2:-2] + "$") +_ZONE_ID_RE = re.compile("(" + _ZONE_ID_PAT + r")\]$") + +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % ( + _REG_NAME_PAT, + _IPV4_PAT, + _IPV6_ADDRZ_PAT, +) +_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL) + +_UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +_SUB_DELIM_CHARS = set("!$&'()*+,;=") +_USERINFO_CHARS = _UNRESERVED_CHARS | _SUB_DELIM_CHARS | {":"} +_PATH_CHARS = _USERINFO_CHARS | {"@", "/"} +_QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"} + + +class Url( + typing.NamedTuple( + "Url", + [ + ("scheme", typing.Optional[str]), + ("auth", typing.Optional[str]), + ("host", typing.Optional[str]), + ("port", typing.Optional[int]), + ("path", typing.Optional[str]), + ("query", typing.Optional[str]), + ("fragment", typing.Optional[str]), + ], + ) +): + """ + Data structure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + + def __new__( # type: ignore[no-untyped-def] + cls, + scheme: str | None = None, + auth: str | None = None, + host: str | None = None, + port: int | None = None, + path: str | None = None, + query: str | None = None, + fragment: str | None = None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: + scheme = scheme.lower() + return super().__new__(cls, scheme, auth, host, port, path, query, fragment) + + @property + def hostname(self) -> str | None: + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self) -> str: + """Absolute path including the query string.""" + uri = self.path or "/" + + if self.query is not None: + uri += "?" 
+ self.query + + return uri + + @property + def authority(self) -> str | None: + """ + Authority component as defined in RFC 3986 3.2. + This includes userinfo (auth), host and port. + + i.e. + userinfo@host:port + """ + userinfo = self.auth + netloc = self.netloc + if netloc is None or userinfo is None: + return netloc + else: + return f"{userinfo}@{netloc}" + + @property + def netloc(self) -> str | None: + """ + Network location including host and port. + + If you need the equivalent of urllib.parse's ``netloc``, + use the ``authority`` property instead. + """ + if self.host is None: + return None + if self.port: + return f"{self.host}:{self.port}" + return self.host + + @property + def url(self) -> str: + """ + Convert self into a URL + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url passed to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: + + .. code-block:: python + + import urllib3 + + U = urllib3.util.parse_url("https://google.com/mail/") + + print(U.url) + # "https://google.com/mail/" + + print( urllib3.util.Url("https", "username:password", + "host.com", 80, "/path", "query", "fragment" + ).url + ) + # "https://username:password@host.com:80/path?query#fragment" + """ + scheme, auth, host, port, path, query, fragment = self + url = "" + + # We use "is not None" because we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + "://" + if auth is not None: + url += auth + "@" + if host is not None: + url += host + if port is not None: + url += ":" + str(port) + if path is not None: + url += path + if query is not None: + url += "?" + query + if fragment is not None: + url += "#" + fragment + + return url + + def __str__(self) -> str: + return self.url + + +@typing.overload +def _encode_invalid_chars( + component: str, allowed_chars: typing.Container[str] +) -> str: # Abstract + ... + + +@typing.overload +def _encode_invalid_chars( + component: None, allowed_chars: typing.Container[str] +) -> None: # Abstract + ... + + +def _encode_invalid_chars( + component: str | None, allowed_chars: typing.Container[str] +) -> str | None: + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = to_str(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others.
+    component, percent_encodings = _PERCENT_RE.subn(
+        lambda match: match.group(0).upper(), component
+    )
+
+    uri_bytes = component.encode("utf-8", "surrogatepass")
+    is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+    encoded_component = bytearray()
+
+    for i in range(len(uri_bytes)):
+        # Slicing (rather than indexing) returns a single-character bytestring
+        byte = uri_bytes[i : i + 1]
+        byte_ord = ord(byte)
+        if (is_percent_encoded and byte == b"%") or (
+            byte_ord < 128 and byte.decode() in allowed_chars
+        ):
+            encoded_component += byte
+            continue
+        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+    return encoded_component.decode()
+
+
+def _remove_path_dot_segments(path: str) -> str:
+    # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+    segments = path.split("/")  # Turn the path into a list of segments
+    output = []  # Initialize the variable to use to store output
+
+    for segment in segments:
+        # '.' is the current directory, so ignore it; it is superfluous
+        if segment == ".":
+            continue
+        # Anything other than '..' should be appended to the output
+        if segment != "..":
+            output.append(segment)
+        # In this case segment == '..'; if we can, we should pop the last
+        # element
+        elif output:
+            output.pop()
+
+    # If the path starts with '/' and the output is empty or the first string
+    # is non-empty
+    if path.startswith("/") and (not output or output[0]):
+        output.insert(0, "")
+
+    # If the path ends with '/.' or '/..' ensure we add one more empty
+    # string to add a trailing '/'
+    if path.endswith(("/.", "/..")):
+        output.append("")
+
+    return "/".join(output)
+
+
+@typing.overload
+def _normalize_host(host: None, scheme: str | None) -> None:
+    ...
+
+
+@typing.overload
+def _normalize_host(host: str, scheme: str | None) -> str:
+    ...
+
+
+def _normalize_host(host: str | None, scheme: str | None) -> str | None:
+    if host:
+        if scheme in _NORMALIZABLE_SCHEMES:
+            is_ipv6 = _IPV6_ADDRZ_RE.match(host)
+            if is_ipv6:
+                # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
+                # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
+                # separator as necessary to return a valid RFC 4007 scoped IP.
+                match = _ZONE_ID_RE.search(host)
+                if match:
+                    start, end = match.span(1)
+                    zone_id = host[start:end]
+
+                    if zone_id.startswith("%25") and zone_id != "%25":
+                        zone_id = zone_id[3:]
+                    else:
+                        zone_id = zone_id[1:]
+                    zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS)
+                    return f"{host[:start].lower()}%{zone_id}{host[end:]}"
+                else:
+                    return host.lower()
+            elif not _IPV4_RE.match(host):
+                return to_str(
+                    b".".join([_idna_encode(label) for label in host.split(".")]),
+                    "ascii",
+                )
+    return host
+
+
+def _idna_encode(name: str) -> bytes:
+    if not name.isascii():
+        try:
+            import idna
+        except ImportError:
+            raise LocationParseError(
+                "Unable to parse URL without the 'idna' module"
+            ) from None
+
+        try:
+            return idna.encode(name.lower(), strict=True, std3_rules=True)
+        except idna.IDNAError:
+            raise LocationParseError(
+                f"Name '{name}' is not a valid IDNA label"
+            ) from None
+
+    return name.lower().encode("ascii")
+
+
+def _encode_target(target: str) -> str:
+    """Percent-encodes a request target so that there are no invalid characters
+
+    Pre-condition for this function is that 'target' must start with '/'.
+    If that is the case then _TARGET_RE will always produce a match.
+ """ + match = _TARGET_RE.match(target) + if not match: # Defensive: + raise LocationParseError(f"{target!r} is not a valid request URI") + + path, query = match.groups() + encoded_target = _encode_invalid_chars(path, _PATH_CHARS) + if query is not None: + query = _encode_invalid_chars(query, _QUERY_CHARS) + encoded_target += "?" + query + return encoded_target + + +def parse_url(url: str) -> Url: + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 and RFC 6874 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. + + :param str url: URL to parse into a :class:`.Url` namedtuple. + + Partly backwards-compatible with :mod:`urllib.parse`. + + Example: + + .. code-block:: python + + import urllib3 + + print( urllib3.util.parse_url('http://google.com/mail/')) + # Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + + print( urllib3.util.parse_url('google.com:80')) + # Url(scheme=None, host='google.com', port=80, path=None, ...) + + print( urllib3.util.parse_url('/foo?bar')) + # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) + """ + if not url: + # Empty + return Url() + + source_url = url + if not _SCHEME_RE.search(url): + url = "//" + url + + scheme: str | None + authority: str | None + auth: str | None + host: str | None + port: str | None + port_int: int | None + path: str | None + query: str | None + fragment: str | None + + try: + scheme, authority, path, query, fragment = _URI_RE.match(url).groups() # type: ignore[union-attr] + normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, _, host_port = authority.rpartition("@") + auth = auth or None + host, port = _HOST_PORT_RE.match(host_port).groups() # type: ignore[union-attr] + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, _USERINFO_CHARS) + if port == "": + port = None + else: + auth, host, port = None, None, None + + if port is not None: + port_int = int(port) + if not (0 <= port_int <= 65535): + raise LocationParseError(url) + else: + port_int = None + + host = _normalize_host(host, scheme) + + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, _PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, _QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS) + + except (ValueError, AttributeError) as e: + raise LocationParseError(source_url) from e + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. 
+    if not path:
+        if query is not None or fragment is not None:
+            path = ""
+        else:
+            path = None
+
+    return Url(
+        scheme=scheme,
+        auth=auth,
+        host=host,
+        port=port_int,
+        path=path,
+        query=query,
+        fragment=fragment,
+    )
diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/util.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..35c77e4025842f548565334a3c04cba90f9283d6
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/util.py
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+import typing
+from types import TracebackType
+
+
+def to_bytes(
+    x: str | bytes, encoding: str | None = None, errors: str | None = None
+) -> bytes:
+    if isinstance(x, bytes):
+        return x
+    elif not isinstance(x, str):
+        raise TypeError(f"not expecting type {type(x).__name__}")
+    if encoding or errors:
+        return x.encode(encoding or "utf-8", errors=errors or "strict")
+    return x.encode()
+
+
+def to_str(
+    x: str | bytes, encoding: str | None = None, errors: str | None = None
+) -> str:
+    if isinstance(x, str):
+        return x
+    elif not isinstance(x, bytes):
+        raise TypeError(f"not expecting type {type(x).__name__}")
+    if encoding or errors:
+        return x.decode(encoding or "utf-8", errors=errors or "strict")
+    return x.decode()
+
+
+def reraise(
+    tp: type[BaseException] | None,
+    value: BaseException,
+    tb: TracebackType | None = None,
+) -> typing.NoReturn:
+    try:
+        if value.__traceback__ is not tb:
+            raise value.with_traceback(tb)
+        raise value
+    finally:
+        value = None  # type: ignore[assignment]
+        tb = None
diff --git a/env-llmeval/lib/python3.10/site-packages/urllib3/util/wait.py b/env-llmeval/lib/python3.10/site-packages/urllib3/util/wait.py
new file mode 100644
index 0000000000000000000000000000000000000000..aeca0c7ad5b232eeb1ad9c43d315bd1d74eaed9a
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/urllib3/util/wait.py
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+import select
+import socket
+from functools import partial
+
+__all__ = ["wait_for_read", "wait_for_write"]
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have lots of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+
+def select_wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    rcheck = []
+    wcheck = []
+    if read:
+        rcheck.append(sock)
+    if write:
+        wcheck.append(sock)
+    # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by marking
+    # it as "exceptional". We paper over the difference by checking the write
+    # sockets for both conditions. (The stdlib selectors module does the same
+    # thing.)
+    fn = partial(select.select, rcheck, wcheck, wcheck)
+    rready, wready, xready = fn(timeout)
+    return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    mask = 0
+    if read:
+        mask |= select.POLLIN
+    if write:
+        mask |= select.POLLOUT
+    poll_obj = select.poll()
+    poll_obj.register(sock, mask)
+
+    # For some reason, poll() takes timeout in milliseconds
+    def do_poll(t: float | None) -> list[tuple[int, int]]:
+        if t is not None:
+            t *= 1000
+        return poll_obj.poll(t)
+
+    return bool(do_poll(timeout))
+
+
+def _have_working_poll() -> bool:
+    # Apparently some systems have a select.poll that fails as soon as you try
+    # to use it, either due to strange configuration or broken monkeypatching
+    # from libraries like eventlet/greenlet.
+    try:
+        poll_obj = select.poll()
+        poll_obj.poll(0)
+    except (AttributeError, OSError):
+        return False
+    else:
+        return True
+
+
+def wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    # We delay choosing which implementation to use until the first time we're
+    # called. We could do it at import time, but then we might make the wrong
+    # decision if someone goes wild with monkeypatching select.poll after
+    # we're imported.
+    global wait_for_socket
+    if _have_working_poll():
+        wait_for_socket = poll_wait_for_socket
+    elif hasattr(select, "select"):
+        wait_for_socket = select_wait_for_socket
+    return wait_for_socket(sock, read, write, timeout)
+
+
+def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool:
+    """Waits for reading to be available on a given socket.
+    Returns True if the socket is readable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool:
+    """Waits for writing to be available on a given socket.
+    Returns True if the socket is writable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, write=True, timeout=timeout)
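As a sanity check on the percent-encoding helper added in url.py above: _encode_invalid_chars preserves bytes that are already percent-encoded (uppercasing their hex digits) and newly encodes everything outside the allowed set as UTF-8 percent-escapes. A minimal sketch of that behavior follows; note that _encode_invalid_chars and _PATH_CHARS are private names in the vendored module and are exercised here for illustration only.

.. code-block:: python

    from urllib3.util.url import _PATH_CHARS, _encode_invalid_chars

    # The existing escape "%7e" is kept (normalized to "%7E") because the
    # component is consistently percent-encoded; the space is newly encoded.
    print(_encode_invalid_chars("/a b/%7e", _PATH_CHARS))  # "/a%20b/%7E"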
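The parse_url docstring examples can be extended to show the normalization steps implemented above: host lowercasing via _normalize_host, dot-segment removal via _remove_path_dot_segments, and re-serialization through the Url.url property. A minimal sketch, assuming the vendored urllib3 is importable:

.. code-block:: python

    from urllib3.util import parse_url

    u = parse_url("HTTP://Example.COM/a/./b/../c?q=1#frag")
    print(u.scheme)       # "http" (schemes are lowercased)
    print(u.host)         # "example.com"
    print(u.path)         # "/a/c" (dot segments removed per RFC 3986 5.2.4)
    print(u.request_uri)  # "/a/c?q=1"

    # Url is a NamedTuple, so _replace() yields a modified copy whose
    # .url property re-serializes it, round-tripping with parse_url().
    print(u._replace(fragment=None).url)  # "http://example.com/a/c?q=1"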
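Likewise, the wait helpers in wait.py can be exercised end to end with a connected socket pair; the first call selects the poll()- or select()-based implementation as described in the module comment. A small sketch (socket.socketpair() is used here only to obtain two connected sockets for demonstration):

.. code-block:: python

    import socket

    from urllib3.util.wait import wait_for_read, wait_for_write

    a, b = socket.socketpair()
    try:
        print(wait_for_write(a, timeout=1.0))  # True: send buffer has room
        print(wait_for_read(a, timeout=0.1))   # False: nothing to read yet

        b.sendall(b"ping")
        print(wait_for_read(a, timeout=1.0))   # True: data is now pending
    finally:
        a.close()
        b.close()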