Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py +154 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/_internal_utils.py +42 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/auth.py +305 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/compat.py +77 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/cookies.py +549 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/exceptions.py +133 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/help.py +132 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/models.py +973 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/packages.py +16 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/sessions.py +771 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/status_codes.py +123 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/structures.py +105 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/utils.py +1060 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +36 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc
ADDED
Binary file (1.51 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc
ADDED
Binary file (4.07 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc
ADDED
Binary file (3.31 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc
ADDED
Binary file (5.15 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc
ADDED
Binary file (934 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc
ADDED
Binary file (979 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc
ADDED
Binary file (4.59 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc
ADDED
Binary file (5.77 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc
ADDED
Binary file (3.28 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (4.41 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py
ADDED
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
# __
|
4 |
+
# /__) _ _ _ _ _/ _
|
5 |
+
# / ( (- (/ (/ (- _) / _)
|
6 |
+
# /
|
7 |
+
|
8 |
+
"""
|
9 |
+
Requests HTTP Library
|
10 |
+
~~~~~~~~~~~~~~~~~~~~~
|
11 |
+
|
12 |
+
Requests is an HTTP library, written in Python, for human beings.
|
13 |
+
Basic GET usage:
|
14 |
+
|
15 |
+
>>> import requests
|
16 |
+
>>> r = requests.get('https://www.python.org')
|
17 |
+
>>> r.status_code
|
18 |
+
200
|
19 |
+
>>> b'Python is a programming language' in r.content
|
20 |
+
True
|
21 |
+
|
22 |
+
... or POST:
|
23 |
+
|
24 |
+
>>> payload = dict(key1='value1', key2='value2')
|
25 |
+
>>> r = requests.post('https://httpbin.org/post', data=payload)
|
26 |
+
>>> print(r.text)
|
27 |
+
{
|
28 |
+
...
|
29 |
+
"form": {
|
30 |
+
"key1": "value1",
|
31 |
+
"key2": "value2"
|
32 |
+
},
|
33 |
+
...
|
34 |
+
}
|
35 |
+
|
36 |
+
The other HTTP methods are supported - see `requests.api`. Full documentation
|
37 |
+
is at <https://requests.readthedocs.io>.
|
38 |
+
|
39 |
+
:copyright: (c) 2017 by Kenneth Reitz.
|
40 |
+
:license: Apache 2.0, see LICENSE for more details.
|
41 |
+
"""
|
42 |
+
|
43 |
+
from pip._vendor import urllib3
|
44 |
+
import warnings
|
45 |
+
from .exceptions import RequestsDependencyWarning
|
46 |
+
|
47 |
+
charset_normalizer_version = None
|
48 |
+
|
49 |
+
try:
|
50 |
+
from pip._vendor.chardet import __version__ as chardet_version
|
51 |
+
except ImportError:
|
52 |
+
chardet_version = None
|
53 |
+
|
54 |
+
def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
|
55 |
+
urllib3_version = urllib3_version.split('.')
|
56 |
+
assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.
|
57 |
+
|
58 |
+
# Sometimes, urllib3 only reports its version as 16.1.
|
59 |
+
if len(urllib3_version) == 2:
|
60 |
+
urllib3_version.append('0')
|
61 |
+
|
62 |
+
# Check urllib3 for compatibility.
|
63 |
+
major, minor, patch = urllib3_version # noqa: F811
|
64 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
65 |
+
# urllib3 >= 1.21.1, <= 1.26
|
66 |
+
assert major == 1
|
67 |
+
assert minor >= 21
|
68 |
+
assert minor <= 26
|
69 |
+
|
70 |
+
# Check charset_normalizer for compatibility.
|
71 |
+
if chardet_version:
|
72 |
+
major, minor, patch = chardet_version.split('.')[:3]
|
73 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
74 |
+
# chardet_version >= 3.0.2, < 5.0.0
|
75 |
+
assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
|
76 |
+
elif charset_normalizer_version:
|
77 |
+
major, minor, patch = charset_normalizer_version.split('.')[:3]
|
78 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
79 |
+
# charset_normalizer >= 2.0.0 < 3.0.0
|
80 |
+
assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
|
81 |
+
else:
|
82 |
+
raise Exception("You need either charset_normalizer or chardet installed")
|
83 |
+
|
84 |
+
def _check_cryptography(cryptography_version):
|
85 |
+
# cryptography < 1.3.4
|
86 |
+
try:
|
87 |
+
cryptography_version = list(map(int, cryptography_version.split('.')))
|
88 |
+
except ValueError:
|
89 |
+
return
|
90 |
+
|
91 |
+
if cryptography_version < [1, 3, 4]:
|
92 |
+
warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
|
93 |
+
warnings.warn(warning, RequestsDependencyWarning)
|
94 |
+
|
95 |
+
# Check imported dependencies for compatibility.
|
96 |
+
try:
|
97 |
+
check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
|
98 |
+
except (AssertionError, ValueError):
|
99 |
+
warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
|
100 |
+
"version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
|
101 |
+
RequestsDependencyWarning)
|
102 |
+
|
103 |
+
# Attempt to enable urllib3's fallback for SNI support
|
104 |
+
# if the standard library doesn't support SNI or the
|
105 |
+
# 'ssl' library isn't available.
|
106 |
+
try:
|
107 |
+
# Note: This logic prevents upgrading cryptography on Windows, if imported
|
108 |
+
# as part of pip.
|
109 |
+
from pip._internal.utils.compat import WINDOWS
|
110 |
+
if not WINDOWS:
|
111 |
+
raise ImportError("pip internals: don't import cryptography on Windows")
|
112 |
+
try:
|
113 |
+
import ssl
|
114 |
+
except ImportError:
|
115 |
+
ssl = None
|
116 |
+
|
117 |
+
if not getattr(ssl, "HAS_SNI", False):
|
118 |
+
from pip._vendor.urllib3.contrib import pyopenssl
|
119 |
+
pyopenssl.inject_into_urllib3()
|
120 |
+
|
121 |
+
# Check cryptography version
|
122 |
+
from cryptography import __version__ as cryptography_version
|
123 |
+
_check_cryptography(cryptography_version)
|
124 |
+
except ImportError:
|
125 |
+
pass
|
126 |
+
|
127 |
+
# urllib3's DependencyWarnings should be silenced.
|
128 |
+
from pip._vendor.urllib3.exceptions import DependencyWarning
|
129 |
+
warnings.simplefilter('ignore', DependencyWarning)
|
130 |
+
|
131 |
+
from .__version__ import __title__, __description__, __url__, __version__
|
132 |
+
from .__version__ import __build__, __author__, __author_email__, __license__
|
133 |
+
from .__version__ import __copyright__, __cake__
|
134 |
+
|
135 |
+
from . import utils
|
136 |
+
from . import packages
|
137 |
+
from .models import Request, Response, PreparedRequest
|
138 |
+
from .api import request, get, head, post, patch, put, delete, options
|
139 |
+
from .sessions import session, Session
|
140 |
+
from .status_codes import codes
|
141 |
+
from .exceptions import (
|
142 |
+
RequestException, Timeout, URLRequired,
|
143 |
+
TooManyRedirects, HTTPError, ConnectionError,
|
144 |
+
FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
|
145 |
+
)
|
146 |
+
|
147 |
+
# Set default logging handler to avoid "No handler found" warnings.
|
148 |
+
import logging
|
149 |
+
from logging import NullHandler
|
150 |
+
|
151 |
+
logging.getLogger(__name__).addHandler(NullHandler())
|
152 |
+
|
153 |
+
# FileModeWarnings go off per the default.
|
154 |
+
warnings.simplefilter('default', FileModeWarning, append=True)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (4.03 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc
ADDED
Binary file (18.7 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc
ADDED
Binary file (2.9 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-310.pyc
ADDED
Binary file (24.4 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/_internal_utils.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests._internal_utils
|
5 |
+
~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
Provides utility functions that are consumed internally by Requests
|
8 |
+
which depend on extremely few external helpers (such as compat)
|
9 |
+
"""
|
10 |
+
|
11 |
+
from .compat import is_py2, builtin_str, str
|
12 |
+
|
13 |
+
|
14 |
+
def to_native_string(string, encoding='ascii'):
|
15 |
+
"""Given a string object, regardless of type, returns a representation of
|
16 |
+
that string in the native string type, encoding and decoding where
|
17 |
+
necessary. This assumes ASCII unless told otherwise.
|
18 |
+
"""
|
19 |
+
if isinstance(string, builtin_str):
|
20 |
+
out = string
|
21 |
+
else:
|
22 |
+
if is_py2:
|
23 |
+
out = string.encode(encoding)
|
24 |
+
else:
|
25 |
+
out = string.decode(encoding)
|
26 |
+
|
27 |
+
return out
|
28 |
+
|
29 |
+
|
30 |
+
def unicode_is_ascii(u_string):
|
31 |
+
"""Determine if unicode string only contains ASCII characters.
|
32 |
+
|
33 |
+
:param str u_string: unicode string to check. Must be unicode
|
34 |
+
and not Python 2 `str`.
|
35 |
+
:rtype: bool
|
36 |
+
"""
|
37 |
+
assert isinstance(u_string, str)
|
38 |
+
try:
|
39 |
+
u_string.encode('ascii')
|
40 |
+
return True
|
41 |
+
except UnicodeEncodeError:
|
42 |
+
return False
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/auth.py
ADDED
@@ -0,0 +1,305 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.auth
|
5 |
+
~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module contains the authentication handlers for Requests.
|
8 |
+
"""
|
9 |
+
|
10 |
+
import os
|
11 |
+
import re
|
12 |
+
import time
|
13 |
+
import hashlib
|
14 |
+
import threading
|
15 |
+
import warnings
|
16 |
+
|
17 |
+
from base64 import b64encode
|
18 |
+
|
19 |
+
from .compat import urlparse, str, basestring
|
20 |
+
from .cookies import extract_cookies_to_jar
|
21 |
+
from ._internal_utils import to_native_string
|
22 |
+
from .utils import parse_dict_header
|
23 |
+
|
24 |
+
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
|
25 |
+
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
|
26 |
+
|
27 |
+
|
28 |
+
def _basic_auth_str(username, password):
|
29 |
+
"""Returns a Basic Auth string."""
|
30 |
+
|
31 |
+
# "I want us to put a big-ol' comment on top of it that
|
32 |
+
# says that this behaviour is dumb but we need to preserve
|
33 |
+
# it because people are relying on it."
|
34 |
+
# - Lukasa
|
35 |
+
#
|
36 |
+
# These are here solely to maintain backwards compatibility
|
37 |
+
# for things like ints. This will be removed in 3.0.0.
|
38 |
+
if not isinstance(username, basestring):
|
39 |
+
warnings.warn(
|
40 |
+
"Non-string usernames will no longer be supported in Requests "
|
41 |
+
"3.0.0. Please convert the object you've passed in ({!r}) to "
|
42 |
+
"a string or bytes object in the near future to avoid "
|
43 |
+
"problems.".format(username),
|
44 |
+
category=DeprecationWarning,
|
45 |
+
)
|
46 |
+
username = str(username)
|
47 |
+
|
48 |
+
if not isinstance(password, basestring):
|
49 |
+
warnings.warn(
|
50 |
+
"Non-string passwords will no longer be supported in Requests "
|
51 |
+
"3.0.0. Please convert the object you've passed in ({!r}) to "
|
52 |
+
"a string or bytes object in the near future to avoid "
|
53 |
+
"problems.".format(type(password)),
|
54 |
+
category=DeprecationWarning,
|
55 |
+
)
|
56 |
+
password = str(password)
|
57 |
+
# -- End Removal --
|
58 |
+
|
59 |
+
if isinstance(username, str):
|
60 |
+
username = username.encode('latin1')
|
61 |
+
|
62 |
+
if isinstance(password, str):
|
63 |
+
password = password.encode('latin1')
|
64 |
+
|
65 |
+
authstr = 'Basic ' + to_native_string(
|
66 |
+
b64encode(b':'.join((username, password))).strip()
|
67 |
+
)
|
68 |
+
|
69 |
+
return authstr
|
70 |
+
|
71 |
+
|
72 |
+
class AuthBase(object):
|
73 |
+
"""Base class that all auth implementations derive from"""
|
74 |
+
|
75 |
+
def __call__(self, r):
|
76 |
+
raise NotImplementedError('Auth hooks must be callable.')
|
77 |
+
|
78 |
+
|
79 |
+
class HTTPBasicAuth(AuthBase):
|
80 |
+
"""Attaches HTTP Basic Authentication to the given Request object."""
|
81 |
+
|
82 |
+
def __init__(self, username, password):
|
83 |
+
self.username = username
|
84 |
+
self.password = password
|
85 |
+
|
86 |
+
def __eq__(self, other):
|
87 |
+
return all([
|
88 |
+
self.username == getattr(other, 'username', None),
|
89 |
+
self.password == getattr(other, 'password', None)
|
90 |
+
])
|
91 |
+
|
92 |
+
def __ne__(self, other):
|
93 |
+
return not self == other
|
94 |
+
|
95 |
+
def __call__(self, r):
|
96 |
+
r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
|
97 |
+
return r
|
98 |
+
|
99 |
+
|
100 |
+
class HTTPProxyAuth(HTTPBasicAuth):
|
101 |
+
"""Attaches HTTP Proxy Authentication to a given Request object."""
|
102 |
+
|
103 |
+
def __call__(self, r):
|
104 |
+
r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
|
105 |
+
return r
|
106 |
+
|
107 |
+
|
108 |
+
class HTTPDigestAuth(AuthBase):
|
109 |
+
"""Attaches HTTP Digest Authentication to the given Request object."""
|
110 |
+
|
111 |
+
def __init__(self, username, password):
|
112 |
+
self.username = username
|
113 |
+
self.password = password
|
114 |
+
# Keep state in per-thread local storage
|
115 |
+
self._thread_local = threading.local()
|
116 |
+
|
117 |
+
def init_per_thread_state(self):
|
118 |
+
# Ensure state is initialized just once per-thread
|
119 |
+
if not hasattr(self._thread_local, 'init'):
|
120 |
+
self._thread_local.init = True
|
121 |
+
self._thread_local.last_nonce = ''
|
122 |
+
self._thread_local.nonce_count = 0
|
123 |
+
self._thread_local.chal = {}
|
124 |
+
self._thread_local.pos = None
|
125 |
+
self._thread_local.num_401_calls = None
|
126 |
+
|
127 |
+
def build_digest_header(self, method, url):
|
128 |
+
"""
|
129 |
+
:rtype: str
|
130 |
+
"""
|
131 |
+
|
132 |
+
realm = self._thread_local.chal['realm']
|
133 |
+
nonce = self._thread_local.chal['nonce']
|
134 |
+
qop = self._thread_local.chal.get('qop')
|
135 |
+
algorithm = self._thread_local.chal.get('algorithm')
|
136 |
+
opaque = self._thread_local.chal.get('opaque')
|
137 |
+
hash_utf8 = None
|
138 |
+
|
139 |
+
if algorithm is None:
|
140 |
+
_algorithm = 'MD5'
|
141 |
+
else:
|
142 |
+
_algorithm = algorithm.upper()
|
143 |
+
# lambdas assume digest modules are imported at the top level
|
144 |
+
if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
|
145 |
+
def md5_utf8(x):
|
146 |
+
if isinstance(x, str):
|
147 |
+
x = x.encode('utf-8')
|
148 |
+
return hashlib.md5(x).hexdigest()
|
149 |
+
hash_utf8 = md5_utf8
|
150 |
+
elif _algorithm == 'SHA':
|
151 |
+
def sha_utf8(x):
|
152 |
+
if isinstance(x, str):
|
153 |
+
x = x.encode('utf-8')
|
154 |
+
return hashlib.sha1(x).hexdigest()
|
155 |
+
hash_utf8 = sha_utf8
|
156 |
+
elif _algorithm == 'SHA-256':
|
157 |
+
def sha256_utf8(x):
|
158 |
+
if isinstance(x, str):
|
159 |
+
x = x.encode('utf-8')
|
160 |
+
return hashlib.sha256(x).hexdigest()
|
161 |
+
hash_utf8 = sha256_utf8
|
162 |
+
elif _algorithm == 'SHA-512':
|
163 |
+
def sha512_utf8(x):
|
164 |
+
if isinstance(x, str):
|
165 |
+
x = x.encode('utf-8')
|
166 |
+
return hashlib.sha512(x).hexdigest()
|
167 |
+
hash_utf8 = sha512_utf8
|
168 |
+
|
169 |
+
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
|
170 |
+
|
171 |
+
if hash_utf8 is None:
|
172 |
+
return None
|
173 |
+
|
174 |
+
# XXX not implemented yet
|
175 |
+
entdig = None
|
176 |
+
p_parsed = urlparse(url)
|
177 |
+
#: path is request-uri defined in RFC 2616 which should not be empty
|
178 |
+
path = p_parsed.path or "/"
|
179 |
+
if p_parsed.query:
|
180 |
+
path += '?' + p_parsed.query
|
181 |
+
|
182 |
+
A1 = '%s:%s:%s' % (self.username, realm, self.password)
|
183 |
+
A2 = '%s:%s' % (method, path)
|
184 |
+
|
185 |
+
HA1 = hash_utf8(A1)
|
186 |
+
HA2 = hash_utf8(A2)
|
187 |
+
|
188 |
+
if nonce == self._thread_local.last_nonce:
|
189 |
+
self._thread_local.nonce_count += 1
|
190 |
+
else:
|
191 |
+
self._thread_local.nonce_count = 1
|
192 |
+
ncvalue = '%08x' % self._thread_local.nonce_count
|
193 |
+
s = str(self._thread_local.nonce_count).encode('utf-8')
|
194 |
+
s += nonce.encode('utf-8')
|
195 |
+
s += time.ctime().encode('utf-8')
|
196 |
+
s += os.urandom(8)
|
197 |
+
|
198 |
+
cnonce = (hashlib.sha1(s).hexdigest()[:16])
|
199 |
+
if _algorithm == 'MD5-SESS':
|
200 |
+
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
|
201 |
+
|
202 |
+
if not qop:
|
203 |
+
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
|
204 |
+
elif qop == 'auth' or 'auth' in qop.split(','):
|
205 |
+
noncebit = "%s:%s:%s:%s:%s" % (
|
206 |
+
nonce, ncvalue, cnonce, 'auth', HA2
|
207 |
+
)
|
208 |
+
respdig = KD(HA1, noncebit)
|
209 |
+
else:
|
210 |
+
# XXX handle auth-int.
|
211 |
+
return None
|
212 |
+
|
213 |
+
self._thread_local.last_nonce = nonce
|
214 |
+
|
215 |
+
# XXX should the partial digests be encoded too?
|
216 |
+
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
|
217 |
+
'response="%s"' % (self.username, realm, nonce, path, respdig)
|
218 |
+
if opaque:
|
219 |
+
base += ', opaque="%s"' % opaque
|
220 |
+
if algorithm:
|
221 |
+
base += ', algorithm="%s"' % algorithm
|
222 |
+
if entdig:
|
223 |
+
base += ', digest="%s"' % entdig
|
224 |
+
if qop:
|
225 |
+
base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
|
226 |
+
|
227 |
+
return 'Digest %s' % (base)
|
228 |
+
|
229 |
+
def handle_redirect(self, r, **kwargs):
|
230 |
+
"""Reset num_401_calls counter on redirects."""
|
231 |
+
if r.is_redirect:
|
232 |
+
self._thread_local.num_401_calls = 1
|
233 |
+
|
234 |
+
def handle_401(self, r, **kwargs):
|
235 |
+
"""
|
236 |
+
Takes the given response and tries digest-auth, if needed.
|
237 |
+
|
238 |
+
:rtype: requests.Response
|
239 |
+
"""
|
240 |
+
|
241 |
+
# If response is not 4xx, do not auth
|
242 |
+
# See https://github.com/psf/requests/issues/3772
|
243 |
+
if not 400 <= r.status_code < 500:
|
244 |
+
self._thread_local.num_401_calls = 1
|
245 |
+
return r
|
246 |
+
|
247 |
+
if self._thread_local.pos is not None:
|
248 |
+
# Rewind the file position indicator of the body to where
|
249 |
+
# it was to resend the request.
|
250 |
+
r.request.body.seek(self._thread_local.pos)
|
251 |
+
s_auth = r.headers.get('www-authenticate', '')
|
252 |
+
|
253 |
+
if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
|
254 |
+
|
255 |
+
self._thread_local.num_401_calls += 1
|
256 |
+
pat = re.compile(r'digest ', flags=re.IGNORECASE)
|
257 |
+
self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
|
258 |
+
|
259 |
+
# Consume content and release the original connection
|
260 |
+
# to allow our new request to reuse the same one.
|
261 |
+
r.content
|
262 |
+
r.close()
|
263 |
+
prep = r.request.copy()
|
264 |
+
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
|
265 |
+
prep.prepare_cookies(prep._cookies)
|
266 |
+
|
267 |
+
prep.headers['Authorization'] = self.build_digest_header(
|
268 |
+
prep.method, prep.url)
|
269 |
+
_r = r.connection.send(prep, **kwargs)
|
270 |
+
_r.history.append(r)
|
271 |
+
_r.request = prep
|
272 |
+
|
273 |
+
return _r
|
274 |
+
|
275 |
+
self._thread_local.num_401_calls = 1
|
276 |
+
return r
|
277 |
+
|
278 |
+
def __call__(self, r):
|
279 |
+
# Initialize per-thread state, if needed
|
280 |
+
self.init_per_thread_state()
|
281 |
+
# If we have a saved nonce, skip the 401
|
282 |
+
if self._thread_local.last_nonce:
|
283 |
+
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
|
284 |
+
try:
|
285 |
+
self._thread_local.pos = r.body.tell()
|
286 |
+
except AttributeError:
|
287 |
+
# In the case of HTTPDigestAuth being reused and the body of
|
288 |
+
# the previous request was a file-like object, pos has the
|
289 |
+
# file position of the previous body. Ensure it's set to
|
290 |
+
# None.
|
291 |
+
self._thread_local.pos = None
|
292 |
+
r.register_hook('response', self.handle_401)
|
293 |
+
r.register_hook('response', self.handle_redirect)
|
294 |
+
self._thread_local.num_401_calls = 1
|
295 |
+
|
296 |
+
return r
|
297 |
+
|
298 |
+
def __eq__(self, other):
|
299 |
+
return all([
|
300 |
+
self.username == getattr(other, 'username', None),
|
301 |
+
self.password == getattr(other, 'password', None)
|
302 |
+
])
|
303 |
+
|
304 |
+
def __ne__(self, other):
|
305 |
+
return not self == other
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/compat.py
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.compat
|
5 |
+
~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module handles import compatibility issues between Python 2 and
|
8 |
+
Python 3.
|
9 |
+
"""
|
10 |
+
|
11 |
+
from pip._vendor import chardet
|
12 |
+
|
13 |
+
import sys
|
14 |
+
|
15 |
+
# -------
|
16 |
+
# Pythons
|
17 |
+
# -------
|
18 |
+
|
19 |
+
# Syntax sugar.
|
20 |
+
_ver = sys.version_info
|
21 |
+
|
22 |
+
#: Python 2.x?
|
23 |
+
is_py2 = (_ver[0] == 2)
|
24 |
+
|
25 |
+
#: Python 3.x?
|
26 |
+
is_py3 = (_ver[0] == 3)
|
27 |
+
|
28 |
+
# Note: We've patched out simplejson support in pip because it prevents
|
29 |
+
# upgrading simplejson on Windows.
|
30 |
+
# try:
|
31 |
+
# import simplejson as json
|
32 |
+
# except (ImportError, SyntaxError):
|
33 |
+
# # simplejson does not support Python 3.2, it throws a SyntaxError
|
34 |
+
# # because of u'...' Unicode literals.
|
35 |
+
import json
|
36 |
+
|
37 |
+
# ---------
|
38 |
+
# Specifics
|
39 |
+
# ---------
|
40 |
+
|
41 |
+
if is_py2:
|
42 |
+
from urllib import (
|
43 |
+
quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
|
44 |
+
proxy_bypass, proxy_bypass_environment, getproxies_environment)
|
45 |
+
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
|
46 |
+
from urllib2 import parse_http_list
|
47 |
+
import cookielib
|
48 |
+
from Cookie import Morsel
|
49 |
+
from StringIO import StringIO
|
50 |
+
# Keep OrderedDict for backwards compatibility.
|
51 |
+
from collections import Callable, Mapping, MutableMapping, OrderedDict
|
52 |
+
|
53 |
+
builtin_str = str
|
54 |
+
bytes = str
|
55 |
+
str = unicode
|
56 |
+
basestring = basestring
|
57 |
+
numeric_types = (int, long, float)
|
58 |
+
integer_types = (int, long)
|
59 |
+
JSONDecodeError = ValueError
|
60 |
+
|
61 |
+
elif is_py3:
|
62 |
+
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
|
63 |
+
from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
|
64 |
+
from http import cookiejar as cookielib
|
65 |
+
from http.cookies import Morsel
|
66 |
+
from io import StringIO
|
67 |
+
# Keep OrderedDict for backwards compatibility.
|
68 |
+
from collections import OrderedDict
|
69 |
+
from collections.abc import Callable, Mapping, MutableMapping
|
70 |
+
from json import JSONDecodeError
|
71 |
+
|
72 |
+
builtin_str = str
|
73 |
+
str = str
|
74 |
+
bytes = bytes
|
75 |
+
basestring = (str, bytes)
|
76 |
+
numeric_types = (int, float)
|
77 |
+
integer_types = (int,)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/cookies.py
ADDED
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.cookies
|
5 |
+
~~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
Compatibility code to be able to use `cookielib.CookieJar` with requests.
|
8 |
+
|
9 |
+
requests.utils imports from here, so be careful with imports.
|
10 |
+
"""
|
11 |
+
|
12 |
+
import copy
|
13 |
+
import time
|
14 |
+
import calendar
|
15 |
+
|
16 |
+
from ._internal_utils import to_native_string
|
17 |
+
from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
|
18 |
+
|
19 |
+
try:
|
20 |
+
import threading
|
21 |
+
except ImportError:
|
22 |
+
import dummy_threading as threading
|
23 |
+
|
24 |
+
|
25 |
+
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., to determine whether a cookie can be set
    given the domains of the request and the cookie.

    The original request object is read-only. The client is responsible for
    collecting the new headers via `get_new_headers()` and interpreting them
    appropriately. You probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        # cookielib reads .type directly in some code paths.
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without an explicit Host header the request URL is authoritative.
        host_header = self._r.headers.get('Host')
        if not host_header:
            return self._r.url
        # A Host header overrides the URL's netloc; rebuild the URL around it.
        host = to_native_string(host_header, encoding='utf-8')
        parsed = urlparse(self._r.url)
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
|
95 |
+
|
96 |
+
|
97 |
+
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        # cookielib calls info() to reach the header container.
        return self._headers

    def getheaders(self, name):
        # BUG FIX: this previously called getheaders() and discarded the
        # result, so every caller received None. Return the value.
        return self._headers.getheaders(name)
|
116 |
+
|
117 |
+
|
118 |
+
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # the _original_response field is the wrapped httplib.HTTPResponse object;
    # nothing to extract when it is absent or empty.
    original = getattr(response, '_original_response', None)
    if not original:
        return
    # Pull out the HTTPMessage with the headers and feed both sides through
    # the cookielib-compatible mocks.
    jar.extract_cookies(MockResponse(original.msg), MockRequest(request))
|
133 |
+
|
134 |
+
|
135 |
+
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    mock = MockRequest(request)
    jar.add_cookie_header(mock)
    # The jar wrote any Cookie header into the mock's new-header store.
    new_headers = mock.get_new_headers()
    return new_headers.get('Cookie')
|
144 |
+
|
145 |
+
|
146 |
+
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first, then clear: clearing while iterating would
    # mutate the jar underneath the iterator.
    matches = [
        (c.domain, c.path, c.name)
        for c in cookiejar
        if c.name == name
        and (domain is None or c.domain == domain)
        and (path is None or c.path == path)
    ]

    for cookie_domain, cookie_path, cookie_name in matches:
        cookiejar.clear(cookie_domain, cookie_path, cookie_name)
|
163 |
+
|
164 |
+
|
165 |
+
class CookieConflictError(RuntimeError):
    """Raised when more than one cookie in the jar matches the lookup criteria.

    Disambiguate by using ``.get()`` / ``.set()`` with explicit ``domain``
    and ``path`` arguments.
    """
|
169 |
+
|
170 |
+
|
171 |
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (
                (domain is None or cookie.domain == domain) and
                (path is None or cookie.path == path)
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            # Multiple matches still means the name is present.
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from quoted cookie values before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        # BUG FIX: compare against None rather than truthiness, so cookies
        # whose value is '' (an empty but valid value) are still found.
        if toReturn is not None:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.set_policy(self.get_policy())
        new_cj.update(self)
        return new_cj

    def get_policy(self):
        """Return the CookiePolicy instance used."""
        return self._policy
|
424 |
+
|
425 |
+
|
426 |
+
def _copy_cookie_jar(jar):
|
427 |
+
if jar is None:
|
428 |
+
return None
|
429 |
+
|
430 |
+
if hasattr(jar, 'copy'):
|
431 |
+
# We're dealing with an instance of RequestsCookieJar
|
432 |
+
return jar.copy()
|
433 |
+
# We're dealing with a generic CookieJar instance
|
434 |
+
new_jar = copy.copy(jar)
|
435 |
+
new_jar.clear()
|
436 |
+
for cookie in jar:
|
437 |
+
new_jar.set_cookie(copy.copy(cookie))
|
438 |
+
return new_jar
|
439 |
+
|
440 |
+
|
441 |
+
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    spec = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain='',
        path='/',
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={'HttpOnly': None},
        rfc2109=False,
    )

    unexpected = set(kwargs) - set(spec)
    if unexpected:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(unexpected))

    spec.update(kwargs)
    # cookielib.Cookie also wants to know whether each field was explicitly
    # provided; derive those flags from the merged values.
    spec['port_specified'] = bool(spec['port'])
    spec['domain_specified'] = bool(spec['domain'])
    spec['domain_initial_dot'] = spec['domain'].startswith('.')
    spec['path_specified'] = bool(spec['path'])

    return cookielib.Cookie(**spec)
|
475 |
+
|
476 |
+
|
477 |
+
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    max_age = morsel['max-age']
    if max_age:
        # max-age is relative; convert it to an absolute epoch timestamp.
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % max_age)
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        parsed = time.strptime(morsel['expires'], time_template)
        expires = calendar.timegm(parsed)

    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
|
506 |
+
|
507 |
+
|
508 |
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    jar = RequestsCookieJar() if cookiejar is None else cookiejar

    if cookie_dict is not None:
        # Snapshot existing names once so the membership test is O(1).
        existing = {cookie.name for cookie in jar}
        for name in cookie_dict:
            if overwrite or name not in existing:
                jar.set_cookie(create_cookie(name, cookie_dict[name]))

    return jar
|
527 |
+
|
528 |
+
|
529 |
+
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        return cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/exceptions.py
ADDED
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.exceptions
|
5 |
+
~~~~~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module contains the set of Requests' exceptions.
|
8 |
+
"""
|
9 |
+
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError
|
10 |
+
|
11 |
+
from .compat import JSONDecodeError as CompatJSONDecodeError
|
12 |
+
|
13 |
+
|
14 |
+
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop('response', None)
        self.request = kwargs.pop('request', None)
        # When only a response was supplied, derive the request from it.
        if self.response is not None and not self.request:
            self.request = getattr(self.response, 'request', self.request)
        super(RequestException, self).__init__(*args, **kwargs)
|
28 |
+
|
29 |
+
|
30 |
+
# --- JSON errors ---

class InvalidJSONError(RequestException):
    """A JSON error occurred."""


class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""


# --- Transport-level errors ---
# Several of these also subclass builtins (ValueError, TypeError) so that
# callers catching the builtin exception keep working.

class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


# --- Request construction / URL errors ---

class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL scheme (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
    """The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class InvalidProxyURL(InvalidURL):
    """The proxy URL provided is invalid."""


# --- Response body errors ---

class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content."""


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed."""


class RetryError(RequestException):
    """Custom retries logic failed"""


class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body."""

# Warnings


class RequestsWarning(Warning):
    """Base warning for Requests."""


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""


class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/help.py
ADDED
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Module containing bug report helper(s)."""
|
2 |
+
from __future__ import print_function
|
3 |
+
|
4 |
+
import json
|
5 |
+
import platform
|
6 |
+
import sys
|
7 |
+
import ssl
|
8 |
+
|
9 |
+
from pip._vendor import idna
|
10 |
+
from pip._vendor import urllib3
|
11 |
+
|
12 |
+
from . import __version__ as requests_version
|
13 |
+
|
14 |
+
charset_normalizer = None
|
15 |
+
|
16 |
+
try:
|
17 |
+
from pip._vendor import chardet
|
18 |
+
except ImportError:
|
19 |
+
chardet = None
|
20 |
+
|
21 |
+
try:
|
22 |
+
from pip._vendor.urllib3.contrib import pyopenssl
|
23 |
+
except ImportError:
|
24 |
+
pyopenssl = None
|
25 |
+
OpenSSL = None
|
26 |
+
cryptography = None
|
27 |
+
else:
|
28 |
+
import OpenSSL
|
29 |
+
import cryptography
|
30 |
+
|
31 |
+
|
32 |
+
def _implementation():
|
33 |
+
"""Return a dict with the Python implementation and version.
|
34 |
+
|
35 |
+
Provide both the name and the version of the Python implementation
|
36 |
+
currently running. For example, on CPython 2.7.5 it will return
|
37 |
+
{'name': 'CPython', 'version': '2.7.5'}.
|
38 |
+
|
39 |
+
This function works best on CPython and PyPy: in particular, it probably
|
40 |
+
doesn't work for Jython or IronPython. Future investigation should be done
|
41 |
+
to work out the correct shape of the code for those platforms.
|
42 |
+
"""
|
43 |
+
implementation = platform.python_implementation()
|
44 |
+
|
45 |
+
if implementation == 'CPython':
|
46 |
+
implementation_version = platform.python_version()
|
47 |
+
elif implementation == 'PyPy':
|
48 |
+
implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
|
49 |
+
sys.pypy_version_info.minor,
|
50 |
+
sys.pypy_version_info.micro)
|
51 |
+
if sys.pypy_version_info.releaselevel != 'final':
|
52 |
+
implementation_version = ''.join([
|
53 |
+
implementation_version, sys.pypy_version_info.releaselevel
|
54 |
+
])
|
55 |
+
elif implementation == 'Jython':
|
56 |
+
implementation_version = platform.python_version() # Complete Guess
|
57 |
+
elif implementation == 'IronPython':
|
58 |
+
implementation_version = platform.python_version() # Complete Guess
|
59 |
+
else:
|
60 |
+
implementation_version = 'Unknown'
|
61 |
+
|
62 |
+
return {'name': implementation, 'version': implementation_version}
|
63 |
+
|
64 |
+
|
65 |
+
def info():
    """Generate information for a bug report."""
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        # Some restricted environments refuse platform queries.
        platform_info = {
            'system': 'Unknown',
            'release': 'Unknown',
        }

    charset_normalizer_info = {
        'version': charset_normalizer.__version__ if charset_normalizer else None,
    }
    chardet_info = {
        'version': chardet.__version__ if chardet else None,
    }

    if OpenSSL:
        pyopenssl_info = {
            'version': OpenSSL.__version__,
            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
        }
    else:
        pyopenssl_info = {
            'version': None,
            'openssl_version': '',
        }

    system_ssl = ssl.OPENSSL_VERSION_NUMBER
    system_ssl_info = {
        'version': '%x' % system_ssl if system_ssl is not None else ''
    }

    return {
        'platform': platform_info,
        'implementation': _implementation(),
        'system_ssl': system_ssl_info,
        'using_pyopenssl': pyopenssl is not None,
        'using_charset_normalizer': chardet is None,
        'pyOpenSSL': pyopenssl_info,
        'urllib3': {'version': urllib3.__version__},
        'chardet': chardet_info,
        'charset_normalizer': charset_normalizer_info,
        'cryptography': {'version': getattr(cryptography, '__version__', '')},
        'idna': {'version': getattr(idna, '__version__', '')},
        'requests': {
            'version': requests_version,
        },
    }
|
124 |
+
|
125 |
+
|
126 |
+
def main():
    """Pretty-print the bug information as JSON."""
    bug_report = info()
    print(json.dumps(bug_report, sort_keys=True, indent=2))


if __name__ == '__main__':
    main()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/models.py
ADDED
@@ -0,0 +1,973 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.models
|
5 |
+
~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module contains the primary objects that power Requests.
|
8 |
+
"""
|
9 |
+
|
10 |
+
import datetime
|
11 |
+
import sys
|
12 |
+
|
13 |
+
# Import encoding now, to avoid implicit import later.
|
14 |
+
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
|
15 |
+
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
|
16 |
+
import encodings.idna
|
17 |
+
|
18 |
+
from pip._vendor.urllib3.fields import RequestField
|
19 |
+
from pip._vendor.urllib3.filepost import encode_multipart_formdata
|
20 |
+
from pip._vendor.urllib3.util import parse_url
|
21 |
+
from pip._vendor.urllib3.exceptions import (
|
22 |
+
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
|
23 |
+
|
24 |
+
from io import UnsupportedOperation
|
25 |
+
from .hooks import default_hooks
|
26 |
+
from .structures import CaseInsensitiveDict
|
27 |
+
|
28 |
+
from .auth import HTTPBasicAuth
|
29 |
+
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
|
30 |
+
from .exceptions import (
|
31 |
+
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
|
32 |
+
ContentDecodingError, ConnectionError, StreamConsumedError,
|
33 |
+
InvalidJSONError)
|
34 |
+
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
|
35 |
+
from ._internal_utils import to_native_string, unicode_is_ascii
|
36 |
+
from .utils import (
|
37 |
+
guess_filename, get_auth_from_url, requote_uri,
|
38 |
+
stream_decode_response_unicode, to_key_val_list, parse_header_links,
|
39 |
+
iter_slices, guess_json_utf, super_len, check_header_validity)
|
40 |
+
from .compat import (
|
41 |
+
Callable, Mapping,
|
42 |
+
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
|
43 |
+
is_py2, chardet, builtin_str, basestring, JSONDecodeError)
|
44 |
+
from .compat import json as complexjson
|
45 |
+
from .status_codes import codes
|
46 |
+
|
47 |
+
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

# Default cap on followed redirects — not referenced in this module;
# presumably consumed by Session (verify against sessions.py).
DEFAULT_REDIRECT_LIMIT = 30
# Default chunk sizes (bytes) for response content iteration; used by
# code beyond this module's visible portion — confirm against Response.
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
60 |
+
|
61 |
+
|
62 |
+
class RequestEncodingMixin(object):
    """Mixin providing URL-path and body encoding helpers for requests."""

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            # A bare host has no path component; default to the root.
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            # Already encoded (or caller-supplied raw body): pass through.
            return data
        elif hasattr(data, 'read'):
            # File-like object: leave for streaming elsewhere.
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                # A scalar value is wrapped so single and multi-valued
                # keys share one code path.
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).

        :returns: ``(body, content_type)`` tuple from urllib3's
            ``encode_multipart_formdata``.
        :raises ValueError: if ``files`` is empty or ``data`` is a string.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Regular form fields go first, normalized to (str, bytes) pairs.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, 'read'):
                fdata = fp.read()
            elif fp is None:
                # A None file value is silently skipped.
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
|
174 |
+
|
175 |
+
|
176 |
+
class RequestHooksMixin(object):
    """Mixin supplying hook (event callback) registration for requests."""

    def register_hook(self, event, hook):
        """Properly register a hook."""
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        registered = self.hooks[event]
        if isinstance(hook, Callable):
            registered.append(hook)
        elif hasattr(hook, '__iter__'):
            # An iterable of hooks: keep only the callable entries.
            callables = (h for h in hook if isinstance(h, Callable))
            registered.extend(callables)

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """
        event_hooks = self.hooks[event]
        if hook in event_hooks:
            event_hooks.remove(hook)
            return True
        return False
|
198 |
+
|
199 |
+
|
200 |
+
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
                 method=None, url=None, headers=None, files=None, data=None,
                 params=None, auth=None, cookies=None, hooks=None, json=None):

        # Start from the standard hook table, then merge in caller hooks.
        self.hooks = default_hooks()
        for event, hook in list((hooks or {}).items()):
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        # Unsupplied collection parameters become fresh empty containers
        # (never shared mutable defaults).
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.json = json
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
|
272 |
+
|
273 |
+
|
274 |
+
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
                method=None, url=None, headers=None, files=None, data=None,
                params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters.

        Order matters: cookies must be prepared after headers (the Cookie
        header is written into ``self.headers``), and the body before auth.
        """

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        """Return a shallow-ish copy: headers and cookies are duplicated,
        the body and hooks are shared with the original."""
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method (normalized to uppercase)."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        # IDNA-encode a non-ASCII hostname; normalized to UnicodeError so
        # callers need not know about the idna package's exception type.
        from pip._vendor import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            # `unicode` only exists on Python 2; the is_py2 guard keeps
            # this line from ever evaluating it on Python 3.
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith((u'*', u'.')):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            # On Python 2, urlunparse needs consistent byte components.
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            # Extra params are appended to any query already in the URL.
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data.

        Sets ``self.body`` plus the Content-Length / Transfer-Encoding /
        Content-Type headers as appropriate. ``json`` is only used when
        ``data`` is falsy.
        """

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        # "Stream" == iterable that is not a plain string/sequence/mapping.
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, Mapping))
        ])

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
|
594 |
+
|
595 |
+
|
596 |
+
class Response(object):
|
597 |
+
"""The :class:`Response <Response>` object, which contains a
|
598 |
+
server's response to an HTTP request.
|
599 |
+
"""
|
600 |
+
|
601 |
+
__attrs__ = [
|
602 |
+
'_content', 'status_code', 'headers', 'url', 'history',
|
603 |
+
'encoding', 'reason', 'cookies', 'elapsed', 'request'
|
604 |
+
]
|
605 |
+
|
606 |
+
def __init__(self):
|
607 |
+
self._content = False
|
608 |
+
self._content_consumed = False
|
609 |
+
self._next = None
|
610 |
+
|
611 |
+
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
|
612 |
+
self.status_code = None
|
613 |
+
|
614 |
+
#: Case-insensitive Dictionary of Response Headers.
|
615 |
+
#: For example, ``headers['content-encoding']`` will return the
|
616 |
+
#: value of a ``'Content-Encoding'`` response header.
|
617 |
+
self.headers = CaseInsensitiveDict()
|
618 |
+
|
619 |
+
#: File-like object representation of response (for advanced usage).
|
620 |
+
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
|
621 |
+
#: This requirement does not apply for use internally to Requests.
|
622 |
+
self.raw = None
|
623 |
+
|
624 |
+
#: Final URL location of Response.
|
625 |
+
self.url = None
|
626 |
+
|
627 |
+
#: Encoding to decode with when accessing r.text.
|
628 |
+
self.encoding = None
|
629 |
+
|
630 |
+
#: A list of :class:`Response <Response>` objects from
|
631 |
+
#: the history of the Request. Any redirect responses will end
|
632 |
+
#: up here. The list is sorted from the oldest to the most recent request.
|
633 |
+
self.history = []
|
634 |
+
|
635 |
+
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
|
636 |
+
self.reason = None
|
637 |
+
|
638 |
+
#: A CookieJar of Cookies the server sent back.
|
639 |
+
self.cookies = cookiejar_from_dict({})
|
640 |
+
|
641 |
+
#: The amount of time elapsed between sending the request
|
642 |
+
#: and the arrival of the response (as a timedelta).
|
643 |
+
#: This property specifically measures the time taken between sending
|
644 |
+
#: the first byte of the request and finishing parsing the headers. It
|
645 |
+
#: is therefore unaffected by consuming the response content or the
|
646 |
+
#: value of the ``stream`` keyword argument.
|
647 |
+
self.elapsed = datetime.timedelta(0)
|
648 |
+
|
649 |
+
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
|
650 |
+
#: is a response.
|
651 |
+
self.request = None
|
652 |
+
|
653 |
+
def __enter__(self):
|
654 |
+
return self
|
655 |
+
|
656 |
+
def __exit__(self, *args):
|
657 |
+
self.close()
|
658 |
+
|
659 |
+
def __getstate__(self):
|
660 |
+
# Consume everything; accessing the content attribute makes
|
661 |
+
# sure the content has been fully read.
|
662 |
+
if not self._content_consumed:
|
663 |
+
self.content
|
664 |
+
|
665 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
666 |
+
|
667 |
+
def __setstate__(self, state):
|
668 |
+
for name, value in state.items():
|
669 |
+
setattr(self, name, value)
|
670 |
+
|
671 |
+
# pickled objects do not have .raw
|
672 |
+
setattr(self, '_content_consumed', True)
|
673 |
+
setattr(self, 'raw', None)
|
674 |
+
|
675 |
+
def __repr__(self):
|
676 |
+
return '<Response [%s]>' % (self.status_code)
|
677 |
+
|
678 |
+
def __bool__(self):
|
679 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
680 |
+
|
681 |
+
This attribute checks if the status code of the response is between
|
682 |
+
400 and 600 to see if there was a client error or a server error. If
|
683 |
+
the status code, is between 200 and 400, this will return True. This
|
684 |
+
is **not** a check to see if the response code is ``200 OK``.
|
685 |
+
"""
|
686 |
+
return self.ok
|
687 |
+
|
688 |
+
def __nonzero__(self):
|
689 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
690 |
+
|
691 |
+
This attribute checks if the status code of the response is between
|
692 |
+
400 and 600 to see if there was a client error or a server error. If
|
693 |
+
the status code, is between 200 and 400, this will return True. This
|
694 |
+
is **not** a check to see if the response code is ``200 OK``.
|
695 |
+
"""
|
696 |
+
return self.ok
|
697 |
+
|
698 |
+
def __iter__(self):
|
699 |
+
"""Allows you to use a response as an iterator."""
|
700 |
+
return self.iter_content(128)
|
701 |
+
|
702 |
+
@property
|
703 |
+
def ok(self):
|
704 |
+
"""Returns True if :attr:`status_code` is less than 400, False if not.
|
705 |
+
|
706 |
+
This attribute checks if the status code of the response is between
|
707 |
+
400 and 600 to see if there was a client error or a server error. If
|
708 |
+
the status code is between 200 and 400, this will return True. This
|
709 |
+
is **not** a check to see if the response code is ``200 OK``.
|
710 |
+
"""
|
711 |
+
try:
|
712 |
+
self.raise_for_status()
|
713 |
+
except HTTPError:
|
714 |
+
return False
|
715 |
+
return True
|
716 |
+
|
717 |
+
@property
|
718 |
+
def is_redirect(self):
|
719 |
+
"""True if this Response is a well-formed HTTP redirect that could have
|
720 |
+
been processed automatically (by :meth:`Session.resolve_redirects`).
|
721 |
+
"""
|
722 |
+
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
|
723 |
+
|
724 |
+
@property
|
725 |
+
def is_permanent_redirect(self):
|
726 |
+
"""True if this Response one of the permanent versions of redirect."""
|
727 |
+
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
|
728 |
+
|
729 |
+
@property
def next(self):
    """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
    # NOTE(review): `_next` is populated elsewhere (presumably during redirect
    # resolution) and may be None when no further redirect exists — confirm
    # against the Session code that sets it.
    return self._next
|
733 |
+
|
734 |
+
@property
def apparent_encoding(self):
    """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
    # NOTE(review): only `chardet` is called here; presumably the name is an
    # alias that may resolve to charset_normalizer in the compat layer —
    # confirm against this module's imports.
    return chardet.detect(self.content)['encoding']
|
738 |
+
|
739 |
+
def iter_content(self, chunk_size=1, decode_unicode=False):
    """Iterates over the response data. When stream=True is set on the
    request, this avoids reading the content at once into memory for
    large responses. The chunk size is the number of bytes it should
    read into memory. This is not necessarily the length of each item
    returned as decoding can take place.

    chunk_size must be of type int or None. A value of None will
    function differently depending on the value of `stream`.
    stream=True will read data as it arrives in whatever size the
    chunks are received. If stream=False, data is returned as
    a single chunk.

    If decode_unicode is True, content will be decoded using the best
    available encoding based on the response.

    :raises StreamConsumedError: if the streamed content was already consumed.
    :raises TypeError: if ``chunk_size`` is neither an int nor None.
    """

    def generate():
        # Special case for urllib3: its response object exposes a
        # ``stream`` method that handles decompression for us.
        if hasattr(self.raw, 'stream'):
            try:
                for chunk in self.raw.stream(chunk_size, decode_content=True):
                    yield chunk
            # Translate urllib3's exceptions into requests' own types so
            # callers only ever need to catch requests exceptions.
            except ProtocolError as e:
                raise ChunkedEncodingError(e)
            except DecodeError as e:
                raise ContentDecodingError(e)
            except ReadTimeoutError as e:
                raise ConnectionError(e)
        else:
            # Standard file-like object.
            while True:
                chunk = self.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        # Mark the body as consumed only after the raw stream is exhausted.
        self._content_consumed = True

    # `_content` is the sentinel False until read; a bool here together with
    # a consumed flag means the stream was drained without caching content.
    if self._content_consumed and isinstance(self._content, bool):
        raise StreamConsumedError()
    elif chunk_size is not None and not isinstance(chunk_size, int):
        raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
    # simulate reading small chunks of the content
    reused_chunks = iter_slices(self._content, chunk_size)

    stream_chunks = generate()

    # Re-slice cached content if it was already read, otherwise stream live.
    chunks = reused_chunks if self._content_consumed else stream_chunks

    if decode_unicode:
        chunks = stream_decode_response_unicode(chunks, self)

    return chunks
|
793 |
+
|
794 |
+
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
    """Iterates over the response data, one line at a time. When
    stream=True is set on the request, this avoids reading the
    content at once into memory for large responses.

    :param chunk_size: number of bytes to read per underlying chunk.
    :param decode_unicode: if True, chunks are decoded to text first.
    :param delimiter: explicit line delimiter; if None, ``splitlines()``
        semantics are used.

    .. note:: This method is not reentrant safe.
    """

    # Holds a trailing partial line carried over between chunks.
    pending = None

    for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

        if pending is not None:
            chunk = pending + chunk

        if delimiter:
            lines = chunk.split(delimiter)
        else:
            lines = chunk.splitlines()

        # If the chunk's last character survived the split, the chunk did not
        # end on a delimiter, so the final piece may be an incomplete line —
        # hold it back until the next chunk arrives.
        if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
            pending = lines.pop()
        else:
            pending = None

        for line in lines:
            yield line

    # Flush any remaining partial line once the stream is exhausted.
    if pending is not None:
        yield pending
|
824 |
+
|
825 |
+
@property
def content(self):
    """Content of the response, in bytes.

    The body is read lazily on first access and cached in ``_content``;
    subsequent accesses return the cached bytes.

    :raises RuntimeError: if the streamed body was already consumed
        without being cached.
    """

    # `_content` starts as the sentinel False, meaning "not read yet".
    if self._content is False:
        # Read the contents.
        if self._content_consumed:
            raise RuntimeError(
                'The content for this response was already consumed')

        if self.status_code == 0 or self.raw is None:
            self._content = None
        else:
            self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

    self._content_consumed = True
    # don't need to release the connection; that's been handled by urllib3
    # since we exhausted the data.
    return self._content
|
844 |
+
|
845 |
+
@property
def text(self):
    """Content of the response, in unicode.

    If Response.encoding is None, encoding will be guessed using
    ``charset_normalizer`` or ``chardet``.

    The encoding of the response content is determined based solely on HTTP
    headers, following RFC 2616 to the letter. If you can take advantage of
    non-HTTP knowledge to make a better guess at the encoding, you should
    set ``r.encoding`` appropriately before accessing this property.
    """

    # Try charset from content-type
    content = None
    encoding = self.encoding

    # Empty body (or None) decodes to the empty string.
    if not self.content:
        return str('')

    # Fallback to auto-detected encoding.
    if self.encoding is None:
        encoding = self.apparent_encoding

    # Decode unicode from given encoding.
    try:
        content = str(self.content, encoding, errors='replace')
    except (LookupError, TypeError):
        # A LookupError is raised if the encoding was not found which could
        # indicate a misspelling or similar mistake.
        #
        # A TypeError can be raised if encoding is None
        #
        # So we try blindly encoding.
        content = str(self.content, errors='replace')

    return content
|
882 |
+
|
883 |
+
def json(self, **kwargs):
    r"""Returns the json-encoded content of a response, if any.

    :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
    :raises requests.exceptions.JSONDecodeError: If the response body does not
        contain valid json.
    """

    # len > 3 because a UTF BOM/width signature needs at least 4 bytes to
    # disambiguate the encoding.
    if not self.encoding and self.content and len(self.content) > 3:
        # No encoding set. JSON RFC 4627 section 3 states we should expect
        # UTF-8, -16 or -32. Detect which one to use; If the detection or
        # decoding fails, fall back to `self.text` (using charset_normalizer to make
        # a best guess).
        encoding = guess_json_utf(self.content)
        if encoding is not None:
            try:
                return complexjson.loads(
                    self.content.decode(encoding), **kwargs
                )
            except UnicodeDecodeError:
                # Wrong UTF codec detected; usually because it's not UTF-8
                # but some other 8-bit codec. This is an RFC violation,
                # and the server didn't bother to tell us what codec *was*
                # used.
                pass

    try:
        return complexjson.loads(self.text, **kwargs)
    except JSONDecodeError as e:
        # Catch JSON-related errors and raise as requests.JSONDecodeError
        # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
        if is_py2:  # e is a ValueError
            raise RequestsJSONDecodeError(e.message)
        else:
            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
|
918 |
+
|
919 |
+
@property
def links(self):
    """Returns the parsed header links of the response, if any."""

    link_header = self.headers.get('link')

    # Map each link's "rel" (or url, as a fallback key) to the parsed link.
    resolved_links = {}

    if link_header:
        for link in parse_header_links(link_header):
            key = link.get('rel') or link.get('url')
            resolved_links[key] = link

    return resolved_links
|
936 |
+
|
937 |
+
def raise_for_status(self):
    """Raises :class:`HTTPError`, if one occurred.

    A 4xx status raises a "Client Error", a 5xx status raises a
    "Server Error"; any other status is a no-op.
    """

    http_error_msg = ''
    if isinstance(self.reason, bytes):
        # We attempt to decode utf-8 first because some servers
        # choose to localize their reason strings. If the string
        # isn't utf-8, we fall back to iso-8859-1 for all other
        # encodings. (See PR #3538)
        try:
            reason = self.reason.decode('utf-8')
        except UnicodeDecodeError:
            reason = self.reason.decode('iso-8859-1')
    else:
        reason = self.reason

    if 400 <= self.status_code < 500:
        http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

    elif 500 <= self.status_code < 600:
        http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

    if http_error_msg:
        raise HTTPError(http_error_msg, response=self)
|
961 |
+
|
962 |
+
def close(self):
    """Releases the connection back to the pool. Once this method has been
    called the underlying ``raw`` object must not be accessed again.

    *Note: Should not normally need to be called explicitly.*
    """
    # Only close the raw stream if the body wasn't fully read; a fully
    # consumed body has already been handled by urllib3.
    if not self._content_consumed:
        self.raw.close()

    # urllib3 responses expose release_conn; other file-like raws may not.
    release_conn = getattr(self.raw, 'release_conn', None)
    if release_conn is not None:
        release_conn()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/packages.py
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import sys

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

for package in ('urllib3', 'idna', 'chardet'):
    vendored_package = "pip._vendor." + package
    locals()[package] = __import__(vendored_package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    # Every already-imported submodule of the vendored package is aliased
    # under requests.packages.* so both spellings share one module object.
    for mod in list(sys.modules):
        if mod == vendored_package or mod.startswith(vendored_package + '.'):
            unprefixed_mod = mod[len("pip._vendor."):]
            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]

# Kinda cool, though, right?
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/sessions.py
ADDED
@@ -0,0 +1,771 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.sessions
|
5 |
+
~~~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module provides a Session object to manage and persist settings across
|
8 |
+
requests (cookies, auth, proxies).
|
9 |
+
"""
|
10 |
+
import os
|
11 |
+
import sys
|
12 |
+
import time
|
13 |
+
from datetime import timedelta
|
14 |
+
from collections import OrderedDict
|
15 |
+
|
16 |
+
from .auth import _basic_auth_str
|
17 |
+
from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
|
18 |
+
from .cookies import (
|
19 |
+
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
|
20 |
+
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
|
21 |
+
from .hooks import default_hooks, dispatch_hook
|
22 |
+
from ._internal_utils import to_native_string
|
23 |
+
from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
|
24 |
+
from .exceptions import (
|
25 |
+
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
|
26 |
+
|
27 |
+
from .structures import CaseInsensitiveDict
|
28 |
+
from .adapters import HTTPAdapter
|
29 |
+
|
30 |
+
from .utils import (
|
31 |
+
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
|
32 |
+
get_auth_from_url, rewind_body, resolve_proxies
|
33 |
+
)
|
34 |
+
|
35 |
+
from .status_codes import codes
|
36 |
+
|
37 |
+
# formerly defined here, reexposed here for backward compatibility
|
38 |
+
from .models import REDIRECT_STATI
|
39 |
+
|
40 |
+
# Preferred clock, based on which one is more accurate on a given system.
# On Windows, perf_counter has far better resolution than time.time;
# elsewhere time.time is sufficient for measuring request elapsed time.
if sys.platform == 'win32':
    try:  # Python 3.4+
        preferred_clock = time.perf_counter
    except AttributeError:  # Earlier than Python 3.
        preferred_clock = time.clock
else:
    preferred_clock = time.time
|
48 |
+
|
49 |
+
|
50 |
+
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`.

    A key whose merged value is None is removed, which lets a request
    explicitly unset a session-level entry.
    """

    # With only one side present, that side wins outright.
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Bypass merging if either side is not a dictionary (e.g. verify).
    both_mappings = (isinstance(session_setting, Mapping)
                     and isinstance(request_setting, Mapping))
    if not both_mappings:
        return request_setting

    # Session values form the base; request values override them.
    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))

    # Remove keys that are set to None. Collect keys first to avoid
    # mutating the dictionary while iterating it.
    for dead_key in [k for (k, v) in merged.items() if v is None]:
        del merged[dead_key]

    return merged
|
79 |
+
|
80 |
+
|
81 |
+
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    def _effectively_empty(hooks):
        # Missing hooks, or a placeholder empty 'response' list, carry no
        # information and must not clobber the other side during merging.
        return hooks is None or hooks.get('response') == []

    if _effectively_empty(session_hooks):
        return request_hooks

    if _effectively_empty(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
|
94 |
+
|
95 |
+
|
96 |
+
class SessionRedirectMixin(object):
    """Mixin supplying redirect-following behavior for :class:`Session`."""

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers['location']
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            if is_py3:
                location = location.encode('latin1')
            return to_native_string(location, 'utf8')
        return None

    def should_strip_auth(self, old_url, new_url):
        """Decide whether Authorization header should be removed when redirecting"""
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        # A different host always strips credentials.
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
                and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
            return False

        # Handle default port usage corresponding to scheme.
        changed_port = old_parsed.port != new_parsed.port
        changed_scheme = old_parsed.scheme != new_parsed.scheme
        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
        if (not changed_scheme and old_parsed.port in default_port
                and new_parsed.port in default_port):
            return False

        # Standard case: root URI must match
        return changed_port or changed_scheme

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests."""

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = ':'.join([to_native_string(parsed_rurl.scheme), url])

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == '' and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/psf/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
                # https://github.com/psf/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            headers.pop('Cookie', None)

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = (
                prepared_request._body_position is not None and
                ('Content-Length' in headers or 'Transfer-Encoding' in headers)
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:

                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            del headers['Authorization']

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        headers = prepared_request.headers
        scheme = urlparse(prepared_request.url).scheme
        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

        # Any existing proxy credentials are invalidated before being
        # re-derived from the (possibly new) proxy URL below.
        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # https://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != 'HEAD':
            method = 'GET'

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != 'HEAD':
            method = 'GET'

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == 'POST':
            method = 'GET'

        prepared_request.method = method
|
322 |
+
|
323 |
+
|
324 |
+
class Session(SessionRedirectMixin):
|
325 |
+
"""A Requests session.
|
326 |
+
|
327 |
+
Provides cookie persistence, connection-pooling, and configuration.
|
328 |
+
|
329 |
+
Basic Usage::
|
330 |
+
|
331 |
+
>>> import requests
|
332 |
+
>>> s = requests.Session()
|
333 |
+
>>> s.get('https://httpbin.org/get')
|
334 |
+
<Response [200]>
|
335 |
+
|
336 |
+
Or as a context manager::
|
337 |
+
|
338 |
+
>>> with requests.Session() as s:
|
339 |
+
... s.get('https://httpbin.org/get')
|
340 |
+
<Response [200]>
|
341 |
+
"""
|
342 |
+
|
343 |
+
__attrs__ = [
|
344 |
+
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
|
345 |
+
'cert', 'adapters', 'stream', 'trust_env',
|
346 |
+
'max_redirects',
|
347 |
+
]
|
348 |
+
|
349 |
+
def __init__(self):

    #: A case-insensitive dictionary of headers to be sent on each
    #: :class:`Request <Request>` sent from this
    #: :class:`Session <Session>`.
    self.headers = default_headers()

    #: Default Authentication tuple or object to attach to
    #: :class:`Request <Request>`.
    self.auth = None

    #: Dictionary mapping protocol or protocol and host to the URL of the proxy
    #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
    #: be used on each :class:`Request <Request>`.
    self.proxies = {}

    #: Event-handling hooks.
    self.hooks = default_hooks()

    #: Dictionary of querystring data to attach to each
    #: :class:`Request <Request>`. The dictionary values may be lists for
    #: representing multivalued query parameters.
    self.params = {}

    #: Stream response content default.
    self.stream = False

    #: SSL Verification default.
    #: Defaults to `True`, requiring requests to verify the TLS certificate at the
    #: remote end.
    #: If verify is set to `False`, requests will accept any TLS certificate
    #: presented by the server, and will ignore hostname mismatches and/or
    #: expired certificates, which will make your application vulnerable to
    #: man-in-the-middle (MitM) attacks.
    #: Only set this to `False` for testing.
    self.verify = True

    #: SSL client certificate default, if String, path to ssl client
    #: cert file (.pem). If Tuple, ('cert', 'key') pair.
    self.cert = None

    #: Maximum number of redirects allowed. If the request exceeds this
    #: limit, a :class:`TooManyRedirects` exception is raised.
    #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
    #: 30.
    self.max_redirects = DEFAULT_REDIRECT_LIMIT

    #: Trust environment settings for proxy configuration, default
    #: authentication and similar.
    self.trust_env = True

    #: A CookieJar containing all currently outstanding cookies set on this
    #: session. By default it is a
    #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
    #: may be any other ``cookielib.CookieJar`` compatible object.
    self.cookies = cookiejar_from_dict({})

    # Default connection adapters. Mounted longest-prefix-first at lookup
    # time; both schemes get a plain HTTPAdapter by default.
    self.adapters = OrderedDict()
    self.mount('https://', HTTPAdapter())
    self.mount('http://', HTTPAdapter())
|
410 |
+
|
411 |
+
def __enter__(self):
    """Enter the runtime context; the session itself is the context object."""
    return self
|
413 |
+
|
414 |
+
def __exit__(self, *args):
    """Exit the runtime context, closing all pooled adapter connections."""
    self.close()
|
416 |
+
|
417 |
+
def prepare_request(self, request):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it. The :class:`PreparedRequest` has settings
    merged from the :class:`Request <Request>` instance and those of the
    :class:`Session`.

    :param request: :class:`Request` instance to prepare with this
        session's settings.
    :rtype: requests.PreparedRequest
    """
    cookies = request.cookies or {}

    # Bootstrap CookieJar.
    if not isinstance(cookies, cookielib.CookieJar):
        cookies = cookiejar_from_dict(cookies)

    # Merge with session cookies. A fresh jar is used so neither the
    # session's nor the request's jar is mutated.
    merged_cookies = merge_cookies(
        merge_cookies(RequestsCookieJar(), self.cookies), cookies)

    # Set environment's basic authentication if not explicitly set.
    auth = request.auth
    if self.trust_env and not auth and not self.auth:
        auth = get_netrc_auth(request.url)

    p = PreparedRequest()
    p.prepare(
        method=request.method.upper(),
        url=request.url,
        files=request.files,
        data=request.data,
        json=request.json,
        headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
        params=merge_setting(request.params, self.params),
        auth=merge_setting(auth, self.auth),
        cookies=merged_cookies,
        hooks=merge_hooks(request.hooks, self.hooks),
    )
    return p
|
456 |
+
|
457 |
+
def request(self, method, url,
        params=None, data=None, headers=None, cookies=None, files=None,
        auth=None, timeout=None, allow_redirects=True, proxies=None,
        hooks=None, stream=None, verify=None, cert=None, json=None):
    """Constructs a :class:`Request <Request>`, prepares it and sends it.
    Returns :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query
        string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the
        :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the
        :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the
        :class:`Request`.
    :param files: (optional) Dictionary of ``'filename': file-like-objects``
        for multipart encoding upload.
    :param auth: (optional) Auth tuple or callable to enable
        Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How long to wait for the server to send
        data before giving up, as a float, or a :ref:`(connect timeout,
        read timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Set to True by default.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol or protocol and
        hostname to the URL of the proxy.
    :param stream: (optional) whether to immediately download the response
        content. Defaults to ``False``.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``. When set to
        ``False``, requests will accept any TLS certificate presented by
        the server, and will ignore hostname mismatches and/or expired
        certificates, which will make your application vulnerable to
        man-in-the-middle (MitM) attacks. Setting verify to ``False``
        may be useful during local development or testing.
    :param cert: (optional) if String, path to ssl client cert file (.pem).
        If Tuple, ('cert', 'key') pair.
    :rtype: requests.Response
    """
    # Create the Request.
    # ``or {}`` normalizes falsy data/params (e.g. None) to an empty
    # mapping before preparation.
    req = Request(
        method=method.upper(),
        url=url,
        headers=headers,
        files=files,
        data=data or {},
        json=json,
        params=params or {},
        auth=auth,
        cookies=cookies,
        hooks=hooks,
    )
    prep = self.prepare_request(req)

    proxies = proxies or {}

    # Layer environment/session settings under the explicit arguments.
    settings = self.merge_environment_settings(
        prep.url, proxies, stream, verify, cert
    )

    # Send the request.
    send_kwargs = {
        'timeout': timeout,
        'allow_redirects': allow_redirects,
    }
    send_kwargs.update(settings)
    resp = self.send(prep, **send_kwargs)

    return resp
|
532 |
+
|
533 |
+
def get(self, url, **kwargs):
    r"""Sends a GET request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # GET follows redirects unless the caller explicitly opted out.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return self.request('GET', url, **kwargs)
|
543 |
+
|
544 |
+
def options(self, url, **kwargs):
    r"""Sends a OPTIONS request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # OPTIONS follows redirects unless the caller explicitly opted out.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return self.request('OPTIONS', url, **kwargs)
|
554 |
+
|
555 |
+
def head(self, url, **kwargs):
    r"""Sends a HEAD request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Unlike the other verbs, HEAD does NOT follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return self.request('HEAD', url, **kwargs)
|
565 |
+
|
566 |
+
def post(self, url, data=None, json=None, **kwargs):
    r"""Sends a POST request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Thin convenience wrapper: forward the body arguments to request().
    kwargs['data'] = data
    kwargs['json'] = json
    return self.request('POST', url, **kwargs)
|
578 |
+
|
579 |
+
def put(self, url, data=None, **kwargs):
    r"""Sends a PUT request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Thin convenience wrapper: forward the body argument to request().
    kwargs['data'] = data
    return self.request('PUT', url, **kwargs)
|
590 |
+
|
591 |
+
def patch(self, url, data=None, **kwargs):
    r"""Sends a PATCH request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Thin convenience wrapper: forward the body argument to request().
    kwargs['data'] = data
    return self.request('PATCH', url, **kwargs)
|
602 |
+
|
603 |
+
def delete(self, url, **kwargs):
    r"""Sends a DELETE request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Thin convenience wrapper around request(); no extra defaults.
    return self.request('DELETE', url, **kwargs)
|
612 |
+
|
613 |
+
def send(self, request, **kwargs):
    """Send a given PreparedRequest.

    :param request: the :class:`PreparedRequest` to transmit (a plain
        :class:`Request` is rejected with ``ValueError``).
    :rtype: requests.Response
    """
    # Set defaults that the hooks can utilize to ensure they always have
    # the correct parameters to reproduce the previous request.
    kwargs.setdefault('stream', self.stream)
    kwargs.setdefault('verify', self.verify)
    kwargs.setdefault('cert', self.cert)
    if 'proxies' not in kwargs:
        kwargs['proxies'] = resolve_proxies(
            request, self.proxies, self.trust_env
        )

    # It's possible that users might accidentally send a Request object.
    # Guard against that specific failure case.
    if isinstance(request, Request):
        raise ValueError('You can only send PreparedRequests.')

    # Set up variables needed for resolve_redirects and dispatching of hooks
    # allow_redirects is popped so it is not forwarded to the adapter.
    allow_redirects = kwargs.pop('allow_redirects', True)
    stream = kwargs.get('stream')
    hooks = request.hooks

    # Get the appropriate adapter to use
    adapter = self.get_adapter(url=request.url)

    # Start time (approximately) of the request
    start = preferred_clock()

    # Send the request
    r = adapter.send(request, **kwargs)

    # Total elapsed time of the request (approximately)
    elapsed = preferred_clock() - start
    r.elapsed = timedelta(seconds=elapsed)

    # Response manipulation hooks
    r = dispatch_hook('response', hooks, r, **kwargs)

    # Persist cookies
    if r.history:

        # If the hooks create history then we want those cookies too
        for resp in r.history:
            extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

    extract_cookies_to_jar(self.cookies, request, r.raw)

    # Resolve redirects if allowed.
    if allow_redirects:
        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)
        history = [resp for resp in gen]
    else:
        history = []

    # Shuffle things around if there's history.
    if history:
        # Insert the first (original) request at the start
        history.insert(0, r)
        # Get the last request made
        r = history.pop()
        r.history = history

    # If redirects aren't being followed, store the response on the Request for Response.next().
    if not allow_redirects:
        try:
            r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))
        except StopIteration:
            pass

    if not stream:
        # NOTE(review): touching ``r.content`` appears intended to read the
        # body eagerly when not streaming — confirm against Response.content.
        r.content

    return r
|
690 |
+
|
691 |
+
def merge_environment_settings(self, url, proxies, stream, verify, cert):
    """
    Check the environment and merge it with some settings.

    :param url: URL the request will go to (used for environment proxy lookup).
    :param proxies: caller-supplied proxy mapping (may be mutated in place
        when ``trust_env`` adds environment proxies).
    :param stream: caller-supplied stream flag, possibly ``None``.
    :param verify: caller-supplied TLS verification setting, possibly ``None``.
    :param cert: caller-supplied client certificate, possibly ``None``.
    :rtype: dict
    """
    # Gather clues from the surrounding environment.
    if self.trust_env:
        # Set environment's proxies.
        no_proxy = proxies.get('no_proxy') if proxies is not None else None
        env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        for (k, v) in env_proxies.items():
            # Explicit caller proxies win over environment ones.
            proxies.setdefault(k, v)

        # Look for requests environment configuration and be compatible
        # with cURL.
        if verify is True or verify is None:
            verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                      os.environ.get('CURL_CA_BUNDLE'))

    # Merge all the kwargs.
    proxies = merge_setting(proxies, self.proxies)
    stream = merge_setting(stream, self.stream)
    verify = merge_setting(verify, self.verify)
    cert = merge_setting(cert, self.cert)

    return {'verify': verify, 'proxies': proxies, 'stream': stream,
            'cert': cert}
|
719 |
+
|
720 |
+
def get_adapter(self, url):
    """
    Returns the appropriate connection adapter for the given URL.

    :rtype: requests.adapters.BaseAdapter
    """
    # Prefix matching is case-insensitive; mount() keeps longer prefixes
    # first, so the first hit is the most specific adapter.
    lowered_url = url.lower()
    for prefix, adapter in self.adapters.items():
        if lowered_url.startswith(prefix.lower()):
            return adapter

    # Nothing matches :-/
    raise InvalidSchema("No connection adapters were found for {!r}".format(url))
|
733 |
+
|
734 |
+
def close(self):
    """Closes all adapters and as such the session"""
    # Every mounted adapter owns its own connection pool; close each one.
    for adapter in self.adapters.values():
        adapter.close()
|
738 |
+
|
739 |
+
def mount(self, prefix, adapter):
    """Registers a connection adapter to a prefix.

    Adapters are sorted in descending order by prefix length.
    """
    self.adapters[prefix] = adapter
    # Re-insert every shorter prefix behind the new one so the ordered
    # mapping keeps the longest (most specific) prefixes first.
    shorter_keys = [key for key in self.adapters if len(key) < len(prefix)]
    for key in shorter_keys:
        self.adapters[key] = self.adapters.pop(key)
|
749 |
+
|
750 |
+
def __getstate__(self):
|
751 |
+
state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
752 |
+
return state
|
753 |
+
|
754 |
+
def __setstate__(self, state):
|
755 |
+
for attr, value in state.items():
|
756 |
+
setattr(self, attr, value)
|
757 |
+
|
758 |
+
|
759 |
+
def session():
    """
    Returns a :class:`Session` for context-management.

    .. deprecated:: 1.0.0

    This method has been deprecated since version 1.0.0 and is only kept for
    backwards compatibility. New code should use :class:`~requests.sessions.Session`
    to create a session. This may be removed at a future date.

    :rtype: Session
    """
    # Plain factory kept for backwards compatibility; identical to Session().
    return Session()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/status_codes.py
ADDED
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
r"""
|
4 |
+
The ``codes`` object defines a mapping from common names for HTTP statuses
|
5 |
+
to their numerical codes, accessible either as attributes or as dictionary
|
6 |
+
items.
|
7 |
+
|
8 |
+
Example::
|
9 |
+
|
10 |
+
>>> import requests
|
11 |
+
>>> requests.codes['temporary_redirect']
|
12 |
+
307
|
13 |
+
>>> requests.codes.teapot
|
14 |
+
418
|
15 |
+
>>> requests.codes['\o/']
|
16 |
+
200
|
17 |
+
|
18 |
+
Some codes have multiple names, and both upper- and lower-case versions of
|
19 |
+
the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
|
20 |
+
``codes.okay`` all correspond to the HTTP status code 200.
|
21 |
+
"""
|
22 |
+
|
23 |
+
from .structures import LookupDict
|
24 |
+
|
25 |
+
# Mapping of HTTP status code -> tuple of attribute names under which the
# code is exposed on ``codes``. Multiple aliases may map to the same code.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

codes = LookupDict(name='status_codes')

def _init():
    # Expose every alias as an attribute of ``codes``; identifier-like
    # names also get an upper-case variant (names starting with '\' or '/'
    # are not valid identifiers, so no upper-case twin is added).
    for code, titles in _codes.items():
        for title in titles:
            setattr(codes, title, code)
            if not title.startswith(('\\', '/')):
                setattr(codes, title.upper(), code)

    def doc(code):
        names = ', '.join('``%s``' % n for n in _codes[code])
        return '* %d: %s' % (code, names)

    # Append a generated code listing to the module docstring (which is
    # None when Python runs with docstrings stripped).
    global __doc__
    __doc__ = (__doc__ + '\n' +
               '\n'.join(doc(code) for code in sorted(_codes))
               if __doc__ is not None else None)

_init()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/structures.py
ADDED
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.structures
|
5 |
+
~~~~~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
Data structures that power Requests.
|
8 |
+
"""
|
9 |
+
|
10 |
+
from collections import OrderedDict
|
11 |
+
|
12 |
+
from .compat import Mapping, MutableMapping
|
13 |
+
|
14 |
+
|
15 |
+
class CaseInsensitiveDict(MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of ``MutableMapping`` as well
    as dict's ``copy``. Also provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and iteration (``iter``, ``keys()``,
    ``items()``, etc.) yields case-sensitive keys, while lookups and
    containment checks are case-insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` returns the value of a
    ``'Content-Encoding'`` response header regardless of how the header
    name was originally stored.

    If the constructor, ``.update``, or equality comparison operations
    are given keys that have equal ``.lower()``s, the behavior is
    undefined.
    """

    def __init__(self, data=None, **kwargs):
        # Internal store: lowercase key -> (original-case key, value).
        self._store = OrderedDict()
        self.update({} if data is None else data, **kwargs)

    def __setitem__(self, key, value):
        # Key on the lowercase form but remember the caller's casing.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        for cased_key, _value in self._store.values():
            yield cased_key

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return ((lower_key, pair[1]) for lower_key, pair in self._store.items())

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return NotImplemented
        # Compare insensitively by normalizing both sides to lowercase keys.
        other_ci = CaseInsensitiveDict(other)
        return dict(self.lower_items()) == dict(other_ci.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
|
87 |
+
|
88 |
+
|
89 |
+
class LookupDict(dict):
    """Dictionary lookup object."""

    def __init__(self, name=None):
        super(LookupDict, self).__init__()
        self.name = name

    def __repr__(self):
        return "<lookup '%s'>" % self.name

    def __getitem__(self, key):
        # Lookups go through the instance __dict__ (attributes), not the
        # dict storage, and fall through to None for missing keys.
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/utils.py
ADDED
@@ -0,0 +1,1060 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.utils
|
5 |
+
~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module provides utility functions that are used within Requests
|
8 |
+
that are also useful for external consumption.
|
9 |
+
"""
|
10 |
+
|
11 |
+
import codecs
|
12 |
+
import contextlib
|
13 |
+
import io
|
14 |
+
import os
|
15 |
+
import re
|
16 |
+
import socket
|
17 |
+
import struct
|
18 |
+
import sys
|
19 |
+
import tempfile
|
20 |
+
import warnings
|
21 |
+
import zipfile
|
22 |
+
from collections import OrderedDict
|
23 |
+
from pip._vendor.urllib3.util import make_headers
|
24 |
+
from pip._vendor.urllib3.util import parse_url
|
25 |
+
|
26 |
+
from .__version__ import __version__
|
27 |
+
from . import certs
|
28 |
+
# to_native_string is unused here, but imported here for backwards compatibility
|
29 |
+
from ._internal_utils import to_native_string
|
30 |
+
from .compat import parse_http_list as _parse_list_header
|
31 |
+
from .compat import (
|
32 |
+
quote, urlparse, bytes, str, unquote, getproxies,
|
33 |
+
proxy_bypass, urlunparse, basestring, integer_types, is_py3,
|
34 |
+
proxy_bypass_environment, getproxies_environment, Mapping)
|
35 |
+
from .cookies import cookiejar_from_dict
|
36 |
+
from .structures import CaseInsensitiveDict
|
37 |
+
from .exceptions import (
|
38 |
+
InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
|
39 |
+
|
40 |
+
NETRC_FILES = ('.netrc', '_netrc')
|
41 |
+
|
42 |
+
DEFAULT_CA_BUNDLE_PATH = certs.where()
|
43 |
+
|
44 |
+
DEFAULT_PORTS = {'http': 80, 'https': 443}
|
45 |
+
|
46 |
+
# Ensure that ', ' is used to preserve previous delimiter behavior.
|
47 |
+
DEFAULT_ACCEPT_ENCODING = ", ".join(
|
48 |
+
re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
|
49 |
+
)
|
50 |
+
|
51 |
+
|
52 |
+
if sys.platform == 'win32':
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        # Consult the Internet Settings registry key for the user's proxy
        # override list and test *host* against each pattern.
        try:
            if is_py3:
                import winreg
            else:
                import _winreg as winreg
        except ImportError:
            return False

        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
            proxyEnable = int(winreg.QueryValueEx(internetSettings,
                                                  'ProxyEnable')[0])
            # ProxyOverride is almost always a string
            proxyOverride = winreg.QueryValueEx(internetSettings,
                                                'ProxyOverride')[0]
        except OSError:
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                # '<local>' matches any host without a dot (an intranet name).
                if '.' not in host:
                    return True
            test = test.replace(".", r"\.")  # mask dots
            test = test.replace("*", r".*")  # change glob sequence
            test = test.replace("?", r".")  # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)
|
104 |
+
|
105 |
+
|
106 |
+
def dict_to_sequence(d):
    """Returns an internal sequence dictionary update."""
    # Mappings are converted to their items view; any other sequence is
    # passed through untouched.
    return d.items() if hasattr(d, 'items') else d
|
113 |
+
|
114 |
+
|
115 |
+
def super_len(o):
    """Return the number of bytes still readable from *o*.

    Handles plain sized objects (``__len__``), objects exposing a ``len``
    attribute, and file-like objects (via ``fileno``/``tell``/``seek``).
    When the current read position can be determined, it is subtracted
    from the total length, so the result is the *remaining* length.
    """
    length = None
    position = 0

    if hasattr(o, '__len__'):
        length = len(o)

    elif hasattr(o, 'len'):
        length = o.len

    elif hasattr(o, 'fileno'):
        try:
            fd = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            # AttributeError is a surprising exception, seeing as how we've just checked
            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
            # `Tarfile.extractfile()`, per issue 5229.
            pass
        else:
            length = os.fstat(fd).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning
                )

    if hasattr(o, 'tell'):
        try:
            position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if length is not None:
                position = length
        else:
            if hasattr(o, 'seek') and length is None:
                # StringIO and BytesIO have seek but no usable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(position or 0)
                except (OSError, IOError):
                    length = 0

    if length is None:
        length = 0

    # Never report a negative remaining length.
    return max(0, length - position)
|
177 |
+
|
178 |
+
|
179 |
+
def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    :param url: URL whose hostname is looked up in the netrc file.
    :param raise_errors: if True, re-raise netrc parse/permission errors
        instead of silently skipping netrc auth.
    :rtype: tuple of (login, password), or None when nothing applies.
    """

    # An explicit NETRC environment variable overrides the default
    # lookup locations (~/.netrc, ~/_netrc).
    netrc_file = os.environ.get('NETRC')
    if netrc_file is not None:
        netrc_locations = (netrc_file,)
    else:
        netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)

    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None

        for f in netrc_locations:
            try:
                loc = os.path.expanduser(f)
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See https://bugs.python.org/issue20164 &
                # https://github.com/psf/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # App Engine hackiness.
    except (ImportError, AttributeError):
        pass
|
234 |
+
|
235 |
+
|
236 |
+
def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    # A usable name is a non-empty string that is not an angle-bracketed
    # pseudo-name such as '<stdin>' or '<fdopen>'.
    name = getattr(obj, 'name', None)
    if not name or not isinstance(name, basestring):
        return None
    if name[0] == '<' or name[-1] == '>':
        return None
    return os.path.basename(name)
|
242 |
+
|
243 |
+
|
244 |
+
def extract_zipped_paths(path):
    """Replace nonexistent paths that look like they refer to a member of a zip
    archive with the location of an extracted copy of the target, or else
    just return the provided path unchanged.

    :param path: candidate filesystem path (possibly inside a zip archive).
    :rtype: str
    """
    if os.path.exists(path):
        # this is already a valid path, no need to do anything further
        return path

    # find the first valid part of the provided path and treat that as a zip archive
    # assume the rest of the path is the name of a member in the archive
    archive, member = os.path.split(path)
    while archive and not os.path.exists(archive):
        archive, prefix = os.path.split(archive)
        if not prefix:
            # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
            # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
            break
        member = '/'.join([prefix, member])

    if not zipfile.is_zipfile(archive):
        return path

    zip_file = zipfile.ZipFile(archive)
    if member not in zip_file.namelist():
        return path

    # we have a valid zip archive and a valid member of that archive
    tmp = tempfile.gettempdir()
    extracted_path = os.path.join(tmp, member.split('/')[-1])
    if not os.path.exists(extracted_path):
        # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
        with atomic_open(extracted_path) as file_handler:
            file_handler.write(zip_file.read(member))
    return extracted_path
|
279 |
+
|
280 |
+
|
281 |
+
@contextlib.contextmanager
def atomic_open(filename):
    """Write a file to the disk in an atomic fashion"""
    # os.replace overwrites atomically on Python 3; os.rename is the
    # Python 2 fallback, where os.replace does not exist.
    replacer = os.replace if sys.version_info[0] != 2 else os.rename
    # Stage the data in a temp file in the *same* directory so the final
    # rename/replace cannot cross filesystems.
    tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
            yield tmp_handler
        replacer(tmp_name, filename)
    except BaseException:
        # Remove the partially-written temp file on any failure, then re-raise.
        os.remove(tmp_name)
        raise
|
293 |
+
|
294 |
+
|
295 |
+
def from_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None
    # Scalars (and strings, which would iterate char-by-char) cannot be
    # interpreted as key/value pairs.
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')
    return OrderedDict(value)
|
320 |
+
|
321 |
+
|
322 |
+
def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples

    :rtype: list
    """
    if value is None:
        return None
    # Scalars (and strings, which would iterate char-by-char) cannot be
    # interpreted as key/value pairs.
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    items = value.items() if isinstance(value, Mapping) else value
    return list(items)
|
349 |
+
|
350 |
+
|
351 |
+
# From mitsuhiko/werkzeug (used with permission).
|
352 |
+
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    result = []
    # _parse_list_header (urllib's parse_http_list, via .compat) splits on
    # commas while respecting quoted strings.
    for item in _parse_list_header(value):
        # Strip surrounding quotes and unescape browser-style quoting.
        if item[:1] == item[-1:] == '"':
            item = unquote_header_value(item[1:-1])
        result.append(item)
    return result
|
381 |
+
|
382 |
+
|
383 |
+
# From mitsuhiko/werkzeug (used with permission).
|
384 |
+
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    result = {}
    # _parse_list_header splits on commas while respecting quoted strings.
    for item in _parse_list_header(value):
        # A bare token (no '=') maps to None.
        if '=' not in item:
            result[item] = None
            continue
        name, value = item.split('=', 1)
        # Strip surrounding quotes and unescape browser-style quoting.
        if value[:1] == value[-1:] == '"':
            value = unquote_header_value(value[1:-1])
        result[name] = value
    return result
|
416 |
+
|
417 |
+
|
418 |
+
# From mitsuhiko/werkzeug (used with permission).
|
419 |
+
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :param is_filename: treat the value as a filename; a UNC-style value
        (leading ``\\``) is returned with its escaping untouched.
    :rtype: str
    """
    # Values that are not wrapped in double quotes pass through untouched.
    if not value or value[0] != '"' or value[-1] != '"':
        return value

    # Drop the surrounding quotes before undoing browser-style escaping.
    inner = value[1:-1]

    # If this is a filename and the starting characters look like a UNC
    # path, return it without unescaping: collapsing the leading double
    # backslash would corrupt the path (IE uploads filenames like
    # "C:\foo\bar.txt"; see #458).
    if is_filename and inner[:2] == '\\\\':
        return inner

    return inner.replace('\\\\', '\\').replace('\\"', '"')
|
442 |
+
|
443 |
+
|
444 |
+
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    # Later cookies with a repeated name overwrite earlier ones, exactly
    # like sequential dict assignment.
    return {cookie.name: cookie.value for cookie in cj}
|
457 |
+
|
458 |
+
|
459 |
+
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """

    # Delegates to cookiejar_from_dict, which adds the entries to *cj*
    # and returns it.
    return cookiejar_from_dict(cookie_dict, cj)
|
468 |
+
|
469 |
+
|
470 |
+
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn((
        'In requests 3.0, get_encodings_from_content will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    # Probe, in order: HTML5 meta charset, HTML4 http-equiv pragma, and
    # the XML declaration; collect every candidate found.
    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )
    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found
|
488 |
+
|
489 |
+
|
490 |
+
def _parse_content_type_header(header):
|
491 |
+
"""Returns content type and parameters from given header
|
492 |
+
|
493 |
+
:param header: string
|
494 |
+
:return: tuple containing content type and dictionary of
|
495 |
+
parameters
|
496 |
+
"""
|
497 |
+
|
498 |
+
tokens = header.split(';')
|
499 |
+
content_type, params = tokens[0].strip(), tokens[1:]
|
500 |
+
params_dict = {}
|
501 |
+
items_to_strip = "\"' "
|
502 |
+
|
503 |
+
for param in params:
|
504 |
+
param = param.strip()
|
505 |
+
if param:
|
506 |
+
key, value = param, True
|
507 |
+
index_of_equals = param.find("=")
|
508 |
+
if index_of_equals != -1:
|
509 |
+
key = param[:index_of_equals].strip(items_to_strip)
|
510 |
+
value = param[index_of_equals + 1:].strip(items_to_strip)
|
511 |
+
params_dict[key.lower()] = value
|
512 |
+
return content_type, params_dict
|
513 |
+
|
514 |
+
|
515 |
+
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """
    content_type = headers.get('content-type')
    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    # An explicit charset parameter always wins.
    if 'charset' in params:
        return params['charset'].strip("'\"")

    # RFC 2616 default for text/* when no charset is declared.
    if 'text' in content_type:
        return 'ISO-8859-1'

    # RFC 4627: JSON text defaults to UTF-8 when the charset is unset.
    if 'application/json' in content_type:
        return 'utf-8'
|
538 |
+
|
539 |
+
|
540 |
+
def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator."""
    # Without a known encoding we cannot decode; pass chunks through as-is.
    if r.encoding is None:
        for item in iterator:
            yield item
        return

    # Incremental decoding copes with multi-byte characters that straddle
    # chunk boundaries; 'replace' avoids raising on malformed bytes.
    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any buffered trailing bytes.
    tail = decoder.decode(b'', final=True)
    if tail:
        yield tail
|
556 |
+
|
557 |
+
|
558 |
+
def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    if not string:
        return
    # Treat a missing or non-positive length as "one slice with everything".
    if slice_length is None or slice_length <= 0:
        slice_length = len(string)
    for start in range(0, len(string), slice_length):
        yield string[start:start + slice_length]
|
566 |
+
|
567 |
+
|
568 |
+
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    tried_encodings = []  # kept for debugging; not consulted afterwards

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back: decode with replacement characters.  A TypeError means
    # `encoding` was None, in which case the raw bytes are returned.
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
|
602 |
+
|
603 |
+
|
604 |
+
# The unreserved URI characters (RFC 3986).  Percent-escapes that decode to
# one of these may safely be unquoted (see unquote_unreserved below).
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")
|
607 |
+
|
608 |
+
|
609 |
+
def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split('%')
    # parts[0] precedes the first '%'; every later part begins with the two
    # hex digits of a '%XX' sequence (when the escape is well-formed).
    for i, part in enumerate(parts[1:], 1):
        hex_digits = part[0:2]
        if len(hex_digits) != 2 or not hex_digits.isalnum():
            # Not even a candidate escape; keep the '%' literally.
            parts[i] = '%' + part
            continue
        try:
            char = chr(int(hex_digits, 16))
        except ValueError:
            # Alphanumeric but not valid hex, e.g. '%zz'.
            raise InvalidURL("Invalid percent-escape sequence: '%s'" % hex_digits)
        if char in UNRESERVED_SET:
            parts[i] = char + part[2:]
        else:
            parts[i] = '%' + part
    return ''.join(parts)
|
631 |
+
|
632 |
+
|
633 |
+
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    try:
        # Unquote only the unreserved characters, then quote only illegal
        # characters — reserved characters, unreserved characters and '%'
        # itself are left untouched.
        return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
    except InvalidURL:
        # The URI contains a malformed percent-escape, so the unquote/quote
        # cycle is impossible.  Quote it as-is, this time escaping any bare
        # '%' characters so they cannot cause trouble downstream.
        return quote(uri, safe="!#$&'()*+,/:;=?@[]~")
|
653 |
+
|
654 |
+
|
655 |
+
def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    net_address, bits = net.split('/')
    # Convert everything to native-endian 32-bit integers for bitwise math.
    packed_ip = struct.unpack('=L', socket.inet_aton(ip))[0]
    mask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
    network = struct.unpack('=L', socket.inet_aton(net_address))[0] & mask
    return (packed_ip & mask) == (network & mask)
|
668 |
+
|
669 |
+
|
670 |
+
def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Shift a solid run of 32 one-bits left so that only the top `mask`
    # bits survive, then render the result as a dotted quad.
    value = (0xffffffff << (32 - mask)) & 0xffffffff
    return socket.inet_ntoa(struct.pack('>I', value))
|
679 |
+
|
680 |
+
|
681 |
+
def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    # inet_aton is the authoritative dotted-quad parser; it raises
    # socket.error for anything it cannot parse.
    try:
        socket.inet_aton(string_ip)
        return True
    except socket.error:
        return False
|
690 |
+
|
691 |
+
|
692 |
+
def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Must be exactly '<address>/<mask>'.
    if string_network.count('/') != 1:
        return False

    address, _, mask_text = string_network.partition('/')

    try:
        mask = int(mask_text)
    except ValueError:
        return False

    # The mask must denote a real IPv4 prefix length.
    if not 1 <= mask <= 32:
        return False

    try:
        socket.inet_aton(address)
    except socket.error:
        return False
    return True
|
714 |
+
|
715 |
+
|
716 |
+
@contextlib.contextmanager
def set_environ(env_name, value):
    """Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothing"""
    if value is None:
        # Nothing to change, so nothing to restore either.
        yield
        return

    old_value = os.environ.get(env_name)
    os.environ[env_name] = value
    try:
        yield
    finally:
        # Restore the pre-existing state, including "was unset".
        if old_value is None:
            del os.environ[env_name]
        else:
            os.environ[env_name] = old_value
|
736 |
+
|
737 |
+
|
738 |
+
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :param url: URL being requested.
    :param no_proxy: comma-separated exemption list; when None, the
        no_proxy/NO_PROXY environment variables are consulted instead.
    :rtype: bool
    """
    # Prioritize lowercase environment variables over uppercase
    # to keep a consistent behaviour with other http projects (curl, wget).
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy('no_proxy')
    parsed = urlparse(url)

    if parsed.hostname is None:
        # URLs don't always have hostnames, e.g. file:/// urls.
        return True

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the hostname, both with and without the port.
        no_proxy = (
            host for host in no_proxy.replace(' ', '').split(',') if host
        )

        if is_ipv4_address(parsed.hostname):
            # IP hosts are matched against CIDR ranges or exact IPs.
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(parsed.hostname, proxy_ip):
                        return True
                elif parsed.hostname == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            # Name hosts are matched by suffix, with and without the port.
            host_with_port = parsed.hostname
            if parsed.port:
                host_with_port += ':{}'.format(parsed.port)

            for host in no_proxy:
                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # Defer to the platform's own bypass rules (registry/PAC on some OSes),
    # with the caller's no_proxy temporarily exported for proxy_bypass.
    with set_environ('no_proxy', no_proxy_arg):
        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False
|
797 |
+
|
798 |
+
|
799 |
+
def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    # A URL that is exempt from proxying reports no proxies at all;
    # otherwise fall through to the platform's proxy environment.
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()
|
809 |
+
|
810 |
+
|
811 |
+
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        # Scheme-only lookup for URLs without a host (e.g. file:///...).
        return proxies.get(parts.scheme, proxies.get('all'))

    # Most-specific key wins: scheme+host beats scheme beats the catch-alls.
    candidates = (
        parts.scheme + '://' + parts.hostname,
        parts.scheme,
        'all://' + parts.hostname,
        'all',
    )
    for key in candidates:
        if key in proxies:
            return proxies[key]
    return None
|
835 |
+
|
836 |
+
|
837 |
+
def resolve_proxies(request, proxies, trust_env=True):
    """This method takes proxy information from a request and configuration
    input to resolve a mapping of target proxies. This will consider settings
    such a NO_PROXY to strip proxy configurations.

    :param request: Request or PreparedRequest
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    :param trust_env: Boolean declaring whether to trust environment configs

    :rtype: dict
    """
    proxies = proxies if proxies is not None else {}
    url = request.url
    scheme = urlparse(url).scheme
    no_proxy = proxies.get('no_proxy')
    new_proxies = proxies.copy()

    # Environment proxies only apply when the environment is trusted and
    # the URL is not exempted by a no_proxy rule.
    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

        # Fall back to the catch-all 'all' entry when the scheme has none.
        proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

        if proxy:
            # setdefault: an explicit per-request proxy still wins.
            new_proxies.setdefault(scheme, proxy)
    return new_proxies
|
862 |
+
|
863 |
+
|
864 |
+
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    # e.g. 'python-requests/2.27.1'
    return '{}/{}'.format(name, __version__)
|
871 |
+
|
872 |
+
|
873 |
+
def default_headers():
    """
    :rtype: requests.structures.CaseInsensitiveDict
    """
    # Baseline headers sent with every request unless overridden by the
    # caller or the session.
    return CaseInsensitiveDict({
        'User-Agent': default_user_agent(),
        'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
        'Accept': '*/*',
        'Connection': 'keep-alive',
    })
|
883 |
+
|
884 |
+
|
885 |
+
def parse_header_links(value):
    """Return a list of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """
    links = []
    replace_chars = ' \'"'

    value = value.strip(replace_chars)
    if not value:
        return links

    # Entries are comma-separated, each shaped '<url>; key=value; ...'.
    for entry in re.split(', *<', value):
        try:
            url, params = entry.split(';', 1)
        except ValueError:
            # No parameters at all, just the URL.
            url, params = entry, ''

        link = {'url': url.strip('<> \'"')}

        for param in params.split(';'):
            try:
                key, val = param.split('=')
            except ValueError:
                # Malformed parameter ends this entry's parameter list.
                break
            link[key.strip(replace_chars)] = val.strip(replace_chars)

        links.append(link)

    return links
|
920 |
+
|
921 |
+
|
922 |
+
# Null bytes; no need to recreate these on each call to guess_json_utf.
# One, two and three NUL bytes — used to sniff UTF-16/UTF-32 encodings
# from the position of NULs in the first four octets of a JSON document.
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3
|
926 |
+
|
927 |
+
|
928 |
+
def guess_json_utf(data):
    """Guess the Unicode encoding of a JSON byte string.

    :param data: the raw bytes of a JSON document.
    :rtype: str
    """
    # JSON always starts with two ASCII characters, so detection is as
    # easy as counting the nulls and from their location and count
    # determine the encoding. Also detect a BOM, if present.
    sample = data[:4]
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'     # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'     # BOM included
    nullcount = sample.count(_null)
    if nullcount == 0:
        return 'utf-8'
    if nullcount == 2:
        if sample[::2] == _null2:   # 1st and 3rd are null
            return 'utf-16-be'
        if sample[1::2] == _null2:  # 2nd and 4th are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nullcount == 3:
        if sample[:3] == _null3:
            return 'utf-32-be'
        if sample[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None
|
958 |
+
|
959 |
+
|
960 |
+
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :param url: URL string, possibly schemeless.
    :param new_scheme: scheme to use when the URL has none.
    :rtype: str
    """
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed

    # A defect in urlparse determines that there isn't a netloc present in some
    # urls. We previously assumed parsing was overly cautious, and swapped the
    # netloc and path. Due to a lack of tests on the original defect, this is
    # maintained with parse_url for backwards compatibility.
    netloc = parsed.netloc
    if not netloc:
        netloc, path = path, netloc

    if auth:
        # parse_url doesn't provide the netloc with auth
        # so we'll add it ourselves.
        netloc = '@'.join([auth, netloc])
    if scheme is None:
        scheme = new_scheme
    if path is None:
        path = ''

    return urlunparse((scheme, netloc, path, '', query, fragment))
|
987 |
+
|
988 |
+
|
989 |
+
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parsed = urlparse(url)
    try:
        # unquote(None) raises TypeError and a missing attribute raises
        # AttributeError; both mean "no credentials present".
        return (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        return ('', '')
|
1003 |
+
|
1004 |
+
|
1005 |
+
# Moved outside of function to avoid recompile every call.
# A valid header value is either empty, or starts with a non-whitespace
# character and contains no CR/LF — this blocks header injection.
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
|
1008 |
+
|
1009 |
+
|
1010 |
+
def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    """
    name, value = header

    # Validate against the pattern matching the value's type.
    pat = _CLEAN_HEADER_REGEX_BYTE if isinstance(value, bytes) else _CLEAN_HEADER_REGEX_STR
    try:
        if not pat.match(value):
            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
    except TypeError:
        # Neither str nor bytes: match() cannot handle it at all.
        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
                            "bytes, not %s" % (name, value, type(value)))
|
1029 |
+
|
1030 |
+
|
1031 |
+
def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parts = urlparse(url)
    netloc, path = parts.netloc, parts.path

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Everything up to the last '@' is the userinfo section; discard it.
    netloc = netloc.rsplit('@', 1)[-1]

    # Rebuild the URL without the fragment.
    return urlunparse((parts.scheme, netloc, path, parts.params, parts.query, ''))
|
1046 |
+
|
1047 |
+
|
1048 |
+
def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    body_seek = getattr(prepared_request.body, 'seek', None)
    # Without a seek() method and a recorded integer start position the body
    # cannot be replayed at all.
    if body_seek is None or not isinstance(prepared_request._body_position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
    try:
        body_seek(prepared_request._body_position)
    except (IOError, OSError):
        raise UnrewindableBodyError("An error occurred when rewinding request "
                                    "body for redirect.")
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (2.18 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc
ADDED
Binary file (11.3 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc
ADDED
Binary file (206 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc
ADDED
Binary file (13.6 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc
ADDED
Binary file (25.5 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (11 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc
ADDED
Binary file (8.18 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc
ADDED
Binary file (2.74 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc
ADDED
Binary file (15.3 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc
ADDED
Binary file (5.62 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc
ADDED
Binary file (20.9 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py
ADDED
File without changes
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (192 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc
ADDED
Binary file (1.37 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc
ADDED
Binary file (8.19 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc
ADDED
Binary file (3.63 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc
ADDED
Binary file (15.5 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc
ADDED
Binary file (21.9 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc
ADDED
Binary file (5.59 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
This module provides means to detect the App Engine environment.
|
3 |
+
"""
|
4 |
+
|
5 |
+
import os
|
6 |
+
|
7 |
+
|
8 |
+
def is_appengine():
    """Report whether the code is running in any App Engine environment."""
    # Both helpers return plain booleans, so any() matches 'a or b' exactly.
    return any((is_local_appengine(), is_prod_appengine()))
|
10 |
+
|
11 |
+
|
12 |
+
def is_appengine_sandbox():
    """Reports if the app is running in the first generation sandbox.

    The second generation runtimes are technically still in a sandbox, but it
    is much less restrictive, so generally you shouldn't need to check for it.
    see https://cloud.google.com/appengine/docs/standard/runtimes
    """
    if not is_appengine():
        return False
    # is_appengine() only returns True when APPENGINE_RUNTIME is present,
    # so this lookup cannot raise KeyError.
    return os.environ["APPENGINE_RUNTIME"] == "python27"
|
20 |
+
|
21 |
+
|
22 |
+
def is_local_appengine():
    """Detect the first-generation App Engine local development server."""
    if "APPENGINE_RUNTIME" not in os.environ:
        return False
    server = os.environ.get("SERVER_SOFTWARE", "")
    return server.startswith("Development/")
|
26 |
+
|
27 |
+
|
28 |
+
def is_prod_appengine():
    """Detect the production App Engine runtime."""
    if "APPENGINE_RUNTIME" not in os.environ:
        return False
    server = os.environ.get("SERVER_SOFTWARE", "")
    return server.startswith("Google App Engine/")
|
32 |
+
|
33 |
+
|
34 |
+
def is_prod_appengine_mvms():
    """Deprecated: Managed VMs no longer exist, so this is always False."""
    return False
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
ADDED
File without changes
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (209 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc
ADDED
Binary file (10.7 kB). View file
|
|