krish-emissary committed on
Commit 1a2c8ba · verified · 1 Parent(s): 7fad61e

Delete emissary-ml

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/classification_graphs.cpython-310.pyc +0 -0
  2. emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/classification_graphs_binary.cpython-310.pyc +0 -0
  3. emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/get_max_tokens.cpython-310.pyc +0 -0
  4. emissary-ml/llm-scripts/fine-tuning/llama3/checkpoints/tokenizer.json +0 -0
  5. emissary-ml/llm-scripts/fine-tuning/llama3/gpu_monitoring.py +0 -134
  6. emissary-ml/llm-scripts/fine-tuning/llama3/outputs/special_tokens_map.json +0 -30
  7. emissary-ml/llm-scripts/fine-tuning/llama3/outputs/tokenizer.json +0 -0
  8. emissary-ml/llm-scripts/fine-tuning/llama3/test_script.py +0 -1
  9. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/_virtualenv.py +0 -130
  10. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/appdirs.py +0 -608
  11. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/decorator.py +0 -459
  12. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/distutils-precedence.pth +0 -1
  13. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__init__.py +0 -105
  14. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc +0 -0
  15. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc +0 -0
  16. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc +0 -0
  17. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc +0 -0
  18. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc +0 -0
  19. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc +0 -0
  20. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc +0 -0
  21. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc +0 -0
  22. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc +0 -0
  23. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc +0 -0
  24. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc +0 -0
  25. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc +0 -0
  26. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc +0 -0
  27. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc +0 -0
  28. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc +0 -0
  29. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc +0 -0
  30. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc +0 -0
  31. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__version__.py +0 -3
  32. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_api.py +0 -438
  33. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_auth.py +0 -348
  34. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_client.py +0 -2019
  35. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_config.py +0 -248
  36. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_content.py +0 -240
  37. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_decoders.py +0 -393
  38. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_exceptions.py +0 -379
  39. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_main.py +0 -506
  40. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_models.py +0 -1277
  41. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_multipart.py +0 -300
  42. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_status_codes.py +0 -162
  43. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__init__.py +0 -15
  44. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/__init__.cpython-310.pyc +0 -0
  45. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc +0 -0
  46. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/base.cpython-310.pyc +0 -0
  47. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/default.cpython-310.pyc +0 -0
  48. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc +0 -0
  49. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/wsgi.cpython-310.pyc +0 -0
  50. emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/asgi.py +0 -187
emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/classification_graphs.cpython-310.pyc DELETED
Binary file (9.62 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/classification_graphs_binary.cpython-310.pyc DELETED
Binary file (5.6 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/__pycache__/get_max_tokens.cpython-310.pyc DELETED
Binary file (2.48 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/checkpoints/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
emissary-ml/llm-scripts/fine-tuning/llama3/gpu_monitoring.py DELETED
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-"""
-GPU Memory Monitoring Script for Model Parallelization Experiments
-"""
-
-import subprocess
-import time
-import csv
-import datetime
-import argparse
-
-
-def get_gpu_memory_info():
-    """Get current GPU memory usage using nvidia-smi"""
-    try:
-        result = subprocess.run(
-            ['nvidia-smi', '--query-gpu=index,name,memory.used,memory.total,utilization.gpu',
-             '--format=csv,noheader,nounits'],
-            capture_output=True, text=True, check=True
-        )
-
-        gpu_info = []
-        for line in result.stdout.strip().split('\n'):
-            parts = line.split(', ')
-            gpu_info.append({
-                'index': int(parts[0]),
-                'name': parts[1],
-                'memory_used_mb': int(parts[2]),
-                'memory_total_mb': int(parts[3]),
-                'gpu_utilization': int(parts[4])
-            })
-        return gpu_info
-    except Exception as e:
-        print(f"Error getting GPU info: {e}")
-        return []
-
-def monitor_gpus(output_file, interval=5, experiment_name=""):
-    """Monitor GPU memory usage and save to CSV"""
-
-    with open(output_file, 'w', newline='') as csvfile:
-        fieldnames = ['timestamp', 'experiment', 'gpu_index', 'gpu_name',
-                      'memory_used_mb', 'memory_total_mb', 'memory_percent',
-                      'gpu_utilization']
-        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
-        writer.writeheader()
-
-        print(f"Starting GPU monitoring for experiment: {experiment_name}")
-        print(f"Writing to: {output_file}")
-        print("Press Ctrl+C to stop monitoring\n")
-
-        try:
-            while True:
-                timestamp = datetime.datetime.now().isoformat()
-                gpu_infos = get_gpu_memory_info()
-
-                for gpu in gpu_infos:
-                    memory_percent = (gpu['memory_used_mb'] / gpu['memory_total_mb']) * 100
-
-                    writer.writerow({
-                        'timestamp': timestamp,
-                        'experiment': experiment_name,
-                        'gpu_index': gpu['index'],
-                        'gpu_name': gpu['name'],
-                        'memory_used_mb': gpu['memory_used_mb'],
-                        'memory_total_mb': gpu['memory_total_mb'],
-                        'memory_percent': f"{memory_percent:.2f}",
-                        'gpu_utilization': gpu['gpu_utilization']
-                    })
-
-                    print(f"GPU {gpu['index']}: {gpu['memory_used_mb']}/{gpu['memory_total_mb']} MB "
-                          f"({memory_percent:.1f}%) | Util: {gpu['gpu_utilization']}%")
-
-                print("-" * 80)
-                csvfile.flush()
-                time.sleep(interval)
-
-        except KeyboardInterrupt:
-            print("\nMonitoring stopped.")
-
-def analyze_log(log_file):
-    """Analyze the monitoring log and produce summary statistics"""
-    data = []
-    with open(log_file, 'r') as f:
-        reader = csv.DictReader(f)
-        for row in reader:
-            row['memory_used_mb'] = int(row['memory_used_mb'])
-            row['memory_total_mb'] = int(row['memory_total_mb'])
-            row['memory_percent'] = float(row['memory_percent'])
-            row['gpu_utilization'] = int(row['gpu_utilization'])
-            data.append(row)
-
-    if not data:
-        print("No data found in log file")
-        return
-
-    # Group by GPU
-    gpus = {}
-    for row in data:
-        gpu_idx = row['gpu_index']
-        if gpu_idx not in gpus:
-            gpus[gpu_idx] = []
-        gpus[gpu_idx].append(row)
-
-    print(f"\nAnalysis of {log_file}:")
-    print("=" * 80)
-
-    for gpu_idx, gpu_data in sorted(gpus.items()):
-        memory_used = [d['memory_used_mb'] for d in gpu_data]
-        memory_percent = [d['memory_percent'] for d in gpu_data]
-        gpu_util = [d['gpu_utilization'] for d in gpu_data]
-
-        print(f"\nGPU {gpu_idx} ({gpu_data[0]['gpu_name']}):")
-        print(f"  Memory - Max: {max(memory_used)} MB ({max(memory_percent):.1f}%)")
-        print(f"  Memory - Avg: {sum(memory_used)/len(memory_used):.0f} MB ({sum(memory_percent)/len(memory_percent):.1f}%)")
-        print(f"  GPU Util - Max: {max(gpu_util)}%")
-        print(f"  GPU Util - Avg: {sum(gpu_util)/len(gpu_util):.1f}%")
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='GPU Memory Monitor for ML Experiments')
-    parser.add_argument('--output', '-o', default='gpu_monitor.csv',
-                        help='Output CSV file')
-    parser.add_argument('--interval', '-i', type=int, default=5,
-                        help='Monitoring interval in seconds')
-    parser.add_argument('--experiment', '-e', default='',
-                        help='Experiment name/description')
-    parser.add_argument('--analyze', '-a',
-                        help='Analyze existing log file instead of monitoring')
-
-    args = parser.parse_args()
-
-    if args.analyze:
-        analyze_log(args.analyze)
-    else:
-        monitor_gpus(args.output, args.interval, args.experiment)
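For context, the deleted gpu_monitoring.py exposed a small Python API in addition to its argparse CLI. A minimal sketch of how it would have been driven from Python (the CSV file name and experiment label below are illustrative, not taken from this commit):

    # Hypothetical usage of the deleted script, assuming it is on the import path.
    from gpu_monitoring import monitor_gpus, analyze_log

    # Sample every 10 s while a fine-tuning job runs; stop with Ctrl+C.
    monitor_gpus("llama3_run.csv", interval=10, experiment_name="llama3-ft")

    # Then print per-GPU max/average memory and utilization from the CSV.
    analyze_log("llama3_run.csv")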
 
emissary-ml/llm-scripts/fine-tuning/llama3/outputs/special_tokens_map.json DELETED
@@ -1,30 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
 
emissary-ml/llm-scripts/fine-tuning/llama3/outputs/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
emissary-ml/llm-scripts/fine-tuning/llama3/test_script.py DELETED
@@ -1 +0,0 @@
-test_functions = {}
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/_virtualenv.py DELETED
@@ -1,130 +0,0 @@
-"""Patches that are applied at runtime to the virtual environment"""
-# -*- coding: utf-8 -*-
-
-import os
-import sys
-
-VIRTUALENV_PATCH_FILE = os.path.join(__file__)
-
-
-def patch_dist(dist):
-    """
-    Distutils allows user to configure some arguments via a configuration file:
-    https://docs.python.org/3/install/index.html#distutils-configuration-files
-
-    Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up.
-    """
-    # we cannot allow some install config as that would get packages installed outside of the virtual environment
-    old_parse_config_files = dist.Distribution.parse_config_files
-
-    def parse_config_files(self, *args, **kwargs):
-        result = old_parse_config_files(self, *args, **kwargs)
-        install = self.get_option_dict("install")
-
-        if "prefix" in install:  # the prefix governs where to install the libraries
-            install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
-        for base in ("purelib", "platlib", "headers", "scripts", "data"):
-            key = "install_{}".format(base)
-            if key in install:  # do not allow global configs to hijack venv paths
-                install.pop(key, None)
-        return result
-
-    dist.Distribution.parse_config_files = parse_config_files
-
-
-# Import hook that patches some modules to ignore configuration values that break package installation in case
-# of virtual environments.
-_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
-if sys.version_info > (3, 4):
-    # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
-    from functools import partial
-    from importlib.abc import MetaPathFinder
-    from importlib.util import find_spec
-
-    class _Finder(MetaPathFinder):
-        """A meta path finder that allows patching the imported distutils modules"""
-
-        fullname = None
-
-        # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
-        # because there are gevent-based applications that need to be first to import threading by themselves.
-        # See https://github.com/pypa/virtualenv/issues/1895 for details.
-        lock = []
-
-        def find_spec(self, fullname, path, target=None):
-            if fullname in _DISTUTILS_PATCH and self.fullname is None:
-                # initialize lock[0] lazily
-                if len(self.lock) == 0:
-                    import threading
-
-                    lock = threading.Lock()
-                    # there is possibility that two threads T1 and T2 are simultaneously running into find_spec,
-                    # observing .lock as empty, and further going into hereby initialization. However due to the GIL,
-                    # list.append() operation is atomic and this way only one of the threads will "win" to put the lock
-                    # - that every thread will use - into .lock[0].
-                    # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
-                    self.lock.append(lock)
-
-                with self.lock[0]:
-                    self.fullname = fullname
-                    try:
-                        spec = find_spec(fullname, path)
-                        if spec is not None:
-                            # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
-                            is_new_api = hasattr(spec.loader, "exec_module")
-                            func_name = "exec_module" if is_new_api else "load_module"
-                            old = getattr(spec.loader, func_name)
-                            func = self.exec_module if is_new_api else self.load_module
-                            if old is not func:
-                                try:
-                                    setattr(spec.loader, func_name, partial(func, old))
-                                except AttributeError:
-                                    pass  # C-Extension loaders are r/o such as zipimporter with <python 3.7
-                        return spec
-                    finally:
-                        self.fullname = None
-
-        @staticmethod
-        def exec_module(old, module):
-            old(module)
-            if module.__name__ in _DISTUTILS_PATCH:
-                patch_dist(module)
-
-        @staticmethod
-        def load_module(old, name):
-            module = old(name)
-            if module.__name__ in _DISTUTILS_PATCH:
-                patch_dist(module)
-            return module
-
-    sys.meta_path.insert(0, _Finder())
-else:
-    # https://www.python.org/dev/peps/pep-0302/
-    from imp import find_module
-    from pkgutil import ImpImporter, ImpLoader
-
-    class _VirtualenvImporter(object, ImpImporter):
-        def __init__(self, path=None):
-            object.__init__(self)
-            ImpImporter.__init__(self, path)
-
-        def find_module(self, fullname, path=None):
-            if fullname in _DISTUTILS_PATCH:
-                try:
-                    return _VirtualenvLoader(fullname, *find_module(fullname.split(".")[-1], path))
-                except ImportError:
-                    pass
-            return None
-
-    class _VirtualenvLoader(object, ImpLoader):
-        def __init__(self, fullname, file, filename, etc):
-            object.__init__(self)
-            ImpLoader.__init__(self, fullname, file, filename, etc)
-
-        def load_module(self, fullname):
-            module = super(_VirtualenvLoader, self).load_module(fullname)
-            patch_dist(module)
-            module.__loader__ = None  # distlib fallback
-            return module
-
-    sys.meta_path.append(_VirtualenvImporter())
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/appdirs.py DELETED
@@ -1,608 +0,0 @@
1
- #!/usr/bin/env python
2
- # -*- coding: utf-8 -*-
3
- # Copyright (c) 2005-2010 ActiveState Software Inc.
4
- # Copyright (c) 2013 Eddy Petrișor
5
-
6
- """Utilities for determining application-specific dirs.
7
-
8
- See <http://github.com/ActiveState/appdirs> for details and usage.
9
- """
10
- # Dev Notes:
11
- # - MSDN on where to store app data files:
12
- # http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
13
- # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
14
- # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
15
-
16
- __version__ = "1.4.4"
17
- __version_info__ = tuple(int(segment) for segment in __version__.split("."))
18
-
19
-
20
- import sys
21
- import os
22
-
23
- PY3 = sys.version_info[0] == 3
24
-
25
- if PY3:
26
- unicode = str
27
-
28
- if sys.platform.startswith('java'):
29
- import platform
30
- os_name = platform.java_ver()[3][0]
31
- if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
32
- system = 'win32'
33
- elif os_name.startswith('Mac'): # "Mac OS X", etc.
34
- system = 'darwin'
35
- else: # "Linux", "SunOS", "FreeBSD", etc.
36
- # Setting this to "linux2" is not ideal, but only Windows or Mac
37
- # are actually checked for and the rest of the module expects
38
- # *sys.platform* style strings.
39
- system = 'linux2'
40
- else:
41
- system = sys.platform
42
-
43
-
44
-
45
- def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
46
- r"""Return full path to the user-specific data dir for this application.
47
-
48
- "appname" is the name of application.
49
- If None, just the system directory is returned.
50
- "appauthor" (only used on Windows) is the name of the
51
- appauthor or distributing body for this application. Typically
52
- it is the owning company name. This falls back to appname. You may
53
- pass False to disable it.
54
- "version" is an optional version path element to append to the
55
- path. You might want to use this if you want multiple versions
56
- of your app to be able to run independently. If used, this
57
- would typically be "<major>.<minor>".
58
- Only applied when appname is present.
59
- "roaming" (boolean, default False) can be set True to use the Windows
60
- roaming appdata directory. That means that for users on a Windows
61
- network setup for roaming profiles, this user data will be
62
- sync'd on login. See
63
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
64
- for a discussion of issues.
65
-
66
- Typical user data directories are:
67
- Mac OS X: ~/Library/Application Support/<AppName>
68
- Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
69
- Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
70
- Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
71
- Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
72
- Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
73
-
74
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
75
- That means, by default "~/.local/share/<AppName>".
76
- """
77
- if system == "win32":
78
- if appauthor is None:
79
- appauthor = appname
80
- const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
81
- path = os.path.normpath(_get_win_folder(const))
82
- if appname:
83
- if appauthor is not False:
84
- path = os.path.join(path, appauthor, appname)
85
- else:
86
- path = os.path.join(path, appname)
87
- elif system == 'darwin':
88
- path = os.path.expanduser('~/Library/Application Support/')
89
- if appname:
90
- path = os.path.join(path, appname)
91
- else:
92
- path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
93
- if appname:
94
- path = os.path.join(path, appname)
95
- if appname and version:
96
- path = os.path.join(path, version)
97
- return path
98
-
99
-
100
- def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
101
- r"""Return full path to the user-shared data dir for this application.
102
-
103
- "appname" is the name of application.
104
- If None, just the system directory is returned.
105
- "appauthor" (only used on Windows) is the name of the
106
- appauthor or distributing body for this application. Typically
107
- it is the owning company name. This falls back to appname. You may
108
- pass False to disable it.
109
- "version" is an optional version path element to append to the
110
- path. You might want to use this if you want multiple versions
111
- of your app to be able to run independently. If used, this
112
- would typically be "<major>.<minor>".
113
- Only applied when appname is present.
114
- "multipath" is an optional parameter only applicable to *nix
115
- which indicates that the entire list of data dirs should be
116
- returned. By default, the first item from XDG_DATA_DIRS is
117
- returned, or '/usr/local/share/<AppName>',
118
- if XDG_DATA_DIRS is not set
119
-
120
- Typical site data directories are:
121
- Mac OS X: /Library/Application Support/<AppName>
122
- Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
123
- Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
124
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
125
- Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
126
-
127
- For Unix, this is using the $XDG_DATA_DIRS[0] default.
128
-
129
- WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
130
- """
131
- if system == "win32":
132
- if appauthor is None:
133
- appauthor = appname
134
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
135
- if appname:
136
- if appauthor is not False:
137
- path = os.path.join(path, appauthor, appname)
138
- else:
139
- path = os.path.join(path, appname)
140
- elif system == 'darwin':
141
- path = os.path.expanduser('/Library/Application Support')
142
- if appname:
143
- path = os.path.join(path, appname)
144
- else:
145
- # XDG default for $XDG_DATA_DIRS
146
- # only first, if multipath is False
147
- path = os.getenv('XDG_DATA_DIRS',
148
- os.pathsep.join(['/usr/local/share', '/usr/share']))
149
- pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
150
- if appname:
151
- if version:
152
- appname = os.path.join(appname, version)
153
- pathlist = [os.sep.join([x, appname]) for x in pathlist]
154
-
155
- if multipath:
156
- path = os.pathsep.join(pathlist)
157
- else:
158
- path = pathlist[0]
159
- return path
160
-
161
- if appname and version:
162
- path = os.path.join(path, version)
163
- return path
164
-
165
-
166
- def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
167
- r"""Return full path to the user-specific config dir for this application.
168
-
169
- "appname" is the name of application.
170
- If None, just the system directory is returned.
171
- "appauthor" (only used on Windows) is the name of the
172
- appauthor or distributing body for this application. Typically
173
- it is the owning company name. This falls back to appname. You may
174
- pass False to disable it.
175
- "version" is an optional version path element to append to the
176
- path. You might want to use this if you want multiple versions
177
- of your app to be able to run independently. If used, this
178
- would typically be "<major>.<minor>".
179
- Only applied when appname is present.
180
- "roaming" (boolean, default False) can be set True to use the Windows
181
- roaming appdata directory. That means that for users on a Windows
182
- network setup for roaming profiles, this user data will be
183
- sync'd on login. See
184
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
185
- for a discussion of issues.
186
-
187
- Typical user config directories are:
188
- Mac OS X: same as user_data_dir
189
- Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
190
- Win *: same as user_data_dir
191
-
192
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
193
- That means, by default "~/.config/<AppName>".
194
- """
195
- if system in ["win32", "darwin"]:
196
- path = user_data_dir(appname, appauthor, None, roaming)
197
- else:
198
- path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
199
- if appname:
200
- path = os.path.join(path, appname)
201
- if appname and version:
202
- path = os.path.join(path, version)
203
- return path
204
-
205
-
206
- def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
207
- r"""Return full path to the user-shared data dir for this application.
208
-
209
- "appname" is the name of application.
210
- If None, just the system directory is returned.
211
- "appauthor" (only used on Windows) is the name of the
212
- appauthor or distributing body for this application. Typically
213
- it is the owning company name. This falls back to appname. You may
214
- pass False to disable it.
215
- "version" is an optional version path element to append to the
216
- path. You might want to use this if you want multiple versions
217
- of your app to be able to run independently. If used, this
218
- would typically be "<major>.<minor>".
219
- Only applied when appname is present.
220
- "multipath" is an optional parameter only applicable to *nix
221
- which indicates that the entire list of config dirs should be
222
- returned. By default, the first item from XDG_CONFIG_DIRS is
223
- returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
224
-
225
- Typical site config directories are:
226
- Mac OS X: same as site_data_dir
227
- Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
228
- $XDG_CONFIG_DIRS
229
- Win *: same as site_data_dir
230
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
231
-
232
- For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
233
-
234
- WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
235
- """
236
- if system in ["win32", "darwin"]:
237
- path = site_data_dir(appname, appauthor)
238
- if appname and version:
239
- path = os.path.join(path, version)
240
- else:
241
- # XDG default for $XDG_CONFIG_DIRS
242
- # only first, if multipath is False
243
- path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
244
- pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
245
- if appname:
246
- if version:
247
- appname = os.path.join(appname, version)
248
- pathlist = [os.sep.join([x, appname]) for x in pathlist]
249
-
250
- if multipath:
251
- path = os.pathsep.join(pathlist)
252
- else:
253
- path = pathlist[0]
254
- return path
255
-
256
-
257
- def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
258
- r"""Return full path to the user-specific cache dir for this application.
259
-
260
- "appname" is the name of application.
261
- If None, just the system directory is returned.
262
- "appauthor" (only used on Windows) is the name of the
263
- appauthor or distributing body for this application. Typically
264
- it is the owning company name. This falls back to appname. You may
265
- pass False to disable it.
266
- "version" is an optional version path element to append to the
267
- path. You might want to use this if you want multiple versions
268
- of your app to be able to run independently. If used, this
269
- would typically be "<major>.<minor>".
270
- Only applied when appname is present.
271
- "opinion" (boolean) can be False to disable the appending of
272
- "Cache" to the base app data dir for Windows. See
273
- discussion below.
274
-
275
- Typical user cache directories are:
276
- Mac OS X: ~/Library/Caches/<AppName>
277
- Unix: ~/.cache/<AppName> (XDG default)
278
- Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
279
- Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
280
-
281
- On Windows the only suggestion in the MSDN docs is that local settings go in
282
- the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
283
- app data dir (the default returned by `user_data_dir` above). Apps typically
284
- put cache data somewhere *under* the given dir here. Some examples:
285
- ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
286
- ...\Acme\SuperApp\Cache\1.0
287
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
288
- This can be disabled with the `opinion=False` option.
289
- """
290
- if system == "win32":
291
- if appauthor is None:
292
- appauthor = appname
293
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
294
- if appname:
295
- if appauthor is not False:
296
- path = os.path.join(path, appauthor, appname)
297
- else:
298
- path = os.path.join(path, appname)
299
- if opinion:
300
- path = os.path.join(path, "Cache")
301
- elif system == 'darwin':
302
- path = os.path.expanduser('~/Library/Caches')
303
- if appname:
304
- path = os.path.join(path, appname)
305
- else:
306
- path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
307
- if appname:
308
- path = os.path.join(path, appname)
309
- if appname and version:
310
- path = os.path.join(path, version)
311
- return path
312
-
313
-
314
- def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
315
- r"""Return full path to the user-specific state dir for this application.
316
-
317
- "appname" is the name of application.
318
- If None, just the system directory is returned.
319
- "appauthor" (only used on Windows) is the name of the
320
- appauthor or distributing body for this application. Typically
321
- it is the owning company name. This falls back to appname. You may
322
- pass False to disable it.
323
- "version" is an optional version path element to append to the
324
- path. You might want to use this if you want multiple versions
325
- of your app to be able to run independently. If used, this
326
- would typically be "<major>.<minor>".
327
- Only applied when appname is present.
328
- "roaming" (boolean, default False) can be set True to use the Windows
329
- roaming appdata directory. That means that for users on a Windows
330
- network setup for roaming profiles, this user data will be
331
- sync'd on login. See
332
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
333
- for a discussion of issues.
334
-
335
- Typical user state directories are:
336
- Mac OS X: same as user_data_dir
337
- Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
338
- Win *: same as user_data_dir
339
-
340
- For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
341
- to extend the XDG spec and support $XDG_STATE_HOME.
342
-
343
- That means, by default "~/.local/state/<AppName>".
344
- """
345
- if system in ["win32", "darwin"]:
346
- path = user_data_dir(appname, appauthor, None, roaming)
347
- else:
348
- path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
349
- if appname:
350
- path = os.path.join(path, appname)
351
- if appname and version:
352
- path = os.path.join(path, version)
353
- return path
354
-
355
-
356
- def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
357
- r"""Return full path to the user-specific log dir for this application.
358
-
359
- "appname" is the name of application.
360
- If None, just the system directory is returned.
361
- "appauthor" (only used on Windows) is the name of the
362
- appauthor or distributing body for this application. Typically
363
- it is the owning company name. This falls back to appname. You may
364
- pass False to disable it.
365
- "version" is an optional version path element to append to the
366
- path. You might want to use this if you want multiple versions
367
- of your app to be able to run independently. If used, this
368
- would typically be "<major>.<minor>".
369
- Only applied when appname is present.
370
- "opinion" (boolean) can be False to disable the appending of
371
- "Logs" to the base app data dir for Windows, and "log" to the
372
- base cache dir for Unix. See discussion below.
373
-
374
- Typical user log directories are:
375
- Mac OS X: ~/Library/Logs/<AppName>
376
- Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
377
- Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
378
- Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
379
-
380
- On Windows the only suggestion in the MSDN docs is that local settings
381
- go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
382
- examples of what some windows apps use for a logs dir.)
383
-
384
- OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
385
- value for Windows and appends "log" to the user cache dir for Unix.
386
- This can be disabled with the `opinion=False` option.
387
- """
388
- if system == "darwin":
389
- path = os.path.join(
390
- os.path.expanduser('~/Library/Logs'),
391
- appname)
392
- elif system == "win32":
393
- path = user_data_dir(appname, appauthor, version)
394
- version = False
395
- if opinion:
396
- path = os.path.join(path, "Logs")
397
- else:
398
- path = user_cache_dir(appname, appauthor, version)
399
- version = False
400
- if opinion:
401
- path = os.path.join(path, "log")
402
- if appname and version:
403
- path = os.path.join(path, version)
404
- return path
405
-
406
-
407
- class AppDirs(object):
408
- """Convenience wrapper for getting application dirs."""
409
- def __init__(self, appname=None, appauthor=None, version=None,
410
- roaming=False, multipath=False):
411
- self.appname = appname
412
- self.appauthor = appauthor
413
- self.version = version
414
- self.roaming = roaming
415
- self.multipath = multipath
416
-
417
- @property
418
- def user_data_dir(self):
419
- return user_data_dir(self.appname, self.appauthor,
420
- version=self.version, roaming=self.roaming)
421
-
422
- @property
423
- def site_data_dir(self):
424
- return site_data_dir(self.appname, self.appauthor,
425
- version=self.version, multipath=self.multipath)
426
-
427
- @property
428
- def user_config_dir(self):
429
- return user_config_dir(self.appname, self.appauthor,
430
- version=self.version, roaming=self.roaming)
431
-
432
- @property
433
- def site_config_dir(self):
434
- return site_config_dir(self.appname, self.appauthor,
435
- version=self.version, multipath=self.multipath)
436
-
437
- @property
438
- def user_cache_dir(self):
439
- return user_cache_dir(self.appname, self.appauthor,
440
- version=self.version)
441
-
442
- @property
443
- def user_state_dir(self):
444
- return user_state_dir(self.appname, self.appauthor,
445
- version=self.version)
446
-
447
- @property
448
- def user_log_dir(self):
449
- return user_log_dir(self.appname, self.appauthor,
450
- version=self.version)
451
-
452
-
453
- #---- internal support stuff
454
-
455
- def _get_win_folder_from_registry(csidl_name):
456
- """This is a fallback technique at best. I'm not sure if using the
457
- registry for this guarantees us the correct answer for all CSIDL_*
458
- names.
459
- """
460
- if PY3:
461
- import winreg as _winreg
462
- else:
463
- import _winreg
464
-
465
- shell_folder_name = {
466
- "CSIDL_APPDATA": "AppData",
467
- "CSIDL_COMMON_APPDATA": "Common AppData",
468
- "CSIDL_LOCAL_APPDATA": "Local AppData",
469
- }[csidl_name]
470
-
471
- key = _winreg.OpenKey(
472
- _winreg.HKEY_CURRENT_USER,
473
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
474
- )
475
- dir, type = _winreg.QueryValueEx(key, shell_folder_name)
476
- return dir
477
-
478
-
479
- def _get_win_folder_with_pywin32(csidl_name):
480
- from win32com.shell import shellcon, shell
481
- dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
482
- # Try to make this a unicode path because SHGetFolderPath does
483
- # not return unicode strings when there is unicode data in the
484
- # path.
485
- try:
486
- dir = unicode(dir)
487
-
488
- # Downgrade to short path name if have highbit chars. See
489
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
490
- has_high_char = False
491
- for c in dir:
492
- if ord(c) > 255:
493
- has_high_char = True
494
- break
495
- if has_high_char:
496
- try:
497
- import win32api
498
- dir = win32api.GetShortPathName(dir)
499
- except ImportError:
500
- pass
501
- except UnicodeError:
502
- pass
503
- return dir
504
-
505
-
506
- def _get_win_folder_with_ctypes(csidl_name):
507
- import ctypes
508
-
509
- csidl_const = {
510
- "CSIDL_APPDATA": 26,
511
- "CSIDL_COMMON_APPDATA": 35,
512
- "CSIDL_LOCAL_APPDATA": 28,
513
- }[csidl_name]
514
-
515
- buf = ctypes.create_unicode_buffer(1024)
516
- ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
517
-
518
- # Downgrade to short path name if have highbit chars. See
519
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
520
- has_high_char = False
521
- for c in buf:
522
- if ord(c) > 255:
523
- has_high_char = True
524
- break
525
- if has_high_char:
526
- buf2 = ctypes.create_unicode_buffer(1024)
527
- if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
528
- buf = buf2
529
-
530
- return buf.value
531
-
532
- def _get_win_folder_with_jna(csidl_name):
533
- import array
534
- from com.sun import jna
535
- from com.sun.jna.platform import win32
536
-
537
- buf_size = win32.WinDef.MAX_PATH * 2
538
- buf = array.zeros('c', buf_size)
539
- shell = win32.Shell32.INSTANCE
540
- shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
541
- dir = jna.Native.toString(buf.tostring()).rstrip("\0")
542
-
543
- # Downgrade to short path name if have highbit chars. See
544
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
545
- has_high_char = False
546
- for c in dir:
547
- if ord(c) > 255:
548
- has_high_char = True
549
- break
550
- if has_high_char:
551
- buf = array.zeros('c', buf_size)
552
- kernel = win32.Kernel32.INSTANCE
553
- if kernel.GetShortPathName(dir, buf, buf_size):
554
- dir = jna.Native.toString(buf.tostring()).rstrip("\0")
555
-
556
- return dir
557
-
558
- if system == "win32":
559
- try:
560
- import win32com.shell
561
- _get_win_folder = _get_win_folder_with_pywin32
562
- except ImportError:
563
- try:
564
- from ctypes import windll
565
- _get_win_folder = _get_win_folder_with_ctypes
566
- except ImportError:
567
- try:
568
- import com.sun.jna
569
- _get_win_folder = _get_win_folder_with_jna
570
- except ImportError:
571
- _get_win_folder = _get_win_folder_from_registry
572
-
573
-
574
- #---- self test code
575
-
576
- if __name__ == "__main__":
577
- appname = "MyApp"
578
- appauthor = "MyCompany"
579
-
580
- props = ("user_data_dir",
581
- "user_config_dir",
582
- "user_cache_dir",
583
- "user_state_dir",
584
- "user_log_dir",
585
- "site_data_dir",
586
- "site_config_dir")
587
-
588
- print("-- app dirs %s --" % __version__)
589
-
590
- print("-- app dirs (with optional 'version')")
591
- dirs = AppDirs(appname, appauthor, version="1.0")
592
- for prop in props:
593
- print("%s: %s" % (prop, getattr(dirs, prop)))
594
-
595
- print("\n-- app dirs (without optional 'version')")
596
- dirs = AppDirs(appname, appauthor)
597
- for prop in props:
598
- print("%s: %s" % (prop, getattr(dirs, prop)))
599
-
600
- print("\n-- app dirs (without optional 'appauthor')")
601
- dirs = AppDirs(appname)
602
- for prop in props:
603
- print("%s: %s" % (prop, getattr(dirs, prop)))
604
-
605
- print("\n-- app dirs (with disabled 'appauthor')")
606
- dirs = AppDirs(appname, appauthor=False)
607
- for prop in props:
608
- print("%s: %s" % (prop, getattr(dirs, prop)))
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/decorator.py DELETED
@@ -1,459 +0,0 @@
1
- # ######################### LICENSE ############################ #
2
-
3
- # Copyright (c) 2005-2025, Michele Simionato
4
- # All rights reserved.
5
-
6
- # Redistribution and use in source and binary forms, with or without
7
- # modification, are permitted provided that the following conditions are
8
- # met:
9
-
10
- # Redistributions of source code must retain the above copyright
11
- # notice, this list of conditions and the following disclaimer.
12
- # Redistributions in bytecode form must reproduce the above copyright
13
- # notice, this list of conditions and the following disclaimer in
14
- # the documentation and/or other materials provided with the
15
- # distribution.
16
-
17
- # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18
- # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19
- # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20
- # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21
- # HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
22
- # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
23
- # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
24
- # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25
- # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
26
- # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
27
- # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
28
- # DAMAGE.
29
-
30
- """
31
- Decorator module, see
32
- https://github.com/micheles/decorator/blob/master/docs/documentation.md
33
- for the documentation.
34
- """
35
- import re
36
- import sys
37
- import inspect
38
- import operator
39
- import itertools
40
- import functools
41
- from contextlib import _GeneratorContextManager
42
- from inspect import getfullargspec, iscoroutinefunction, isgeneratorfunction
43
-
44
- __version__ = '5.2.1'
45
-
46
- DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(')
47
- POS = inspect.Parameter.POSITIONAL_OR_KEYWORD
48
- EMPTY = inspect.Parameter.empty
49
-
50
-
51
- # this is not used anymore in the core, but kept for backward compatibility
52
- class FunctionMaker(object):
53
- """
54
- An object with the ability to create functions with a given signature.
55
- It has attributes name, doc, module, signature, defaults, dict and
56
- methods update and make.
57
- """
58
-
59
- # Atomic get-and-increment provided by the GIL
60
- _compile_count = itertools.count()
61
-
62
- # make pylint happy
63
- args = varargs = varkw = defaults = kwonlyargs = kwonlydefaults = ()
64
-
65
- def __init__(self, func=None, name=None, signature=None,
66
- defaults=None, doc=None, module=None, funcdict=None):
67
- self.shortsignature = signature
68
- if func:
69
- # func can be a class or a callable, but not an instance method
70
- self.name = func.__name__
71
- if self.name == '<lambda>': # small hack for lambda functions
72
- self.name = '_lambda_'
73
- self.doc = func.__doc__
74
- self.module = func.__module__
75
- if inspect.isroutine(func) or isinstance(func, functools.partial):
76
- argspec = getfullargspec(func)
77
- self.annotations = getattr(func, '__annotations__', {})
78
- for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
79
- 'kwonlydefaults'):
80
- setattr(self, a, getattr(argspec, a))
81
- for i, arg in enumerate(self.args):
82
- setattr(self, 'arg%d' % i, arg)
83
- allargs = list(self.args)
84
- allshortargs = list(self.args)
85
- if self.varargs:
86
- allargs.append('*' + self.varargs)
87
- allshortargs.append('*' + self.varargs)
88
- elif self.kwonlyargs:
89
- allargs.append('*') # single star syntax
90
- for a in self.kwonlyargs:
91
- allargs.append('%s=None' % a)
92
- allshortargs.append('%s=%s' % (a, a))
93
- if self.varkw:
94
- allargs.append('**' + self.varkw)
95
- allshortargs.append('**' + self.varkw)
96
- self.signature = ', '.join(allargs)
97
- self.shortsignature = ', '.join(allshortargs)
98
- self.dict = func.__dict__.copy()
99
- # func=None happens when decorating a caller
100
- if name:
101
- self.name = name
102
- if signature is not None:
103
- self.signature = signature
104
- if defaults:
105
- self.defaults = defaults
106
- if doc:
107
- self.doc = doc
108
- if module:
109
- self.module = module
110
- if funcdict:
111
- self.dict = funcdict
112
- # check existence required attributes
113
- assert hasattr(self, 'name')
114
- if not hasattr(self, 'signature'):
115
- raise TypeError('You are decorating a non function: %s' % func)
116
-
117
- def update(self, func, **kw):
118
- """
119
- Update the signature of func with the data in self
120
- """
121
- func.__name__ = self.name
122
- func.__doc__ = getattr(self, 'doc', None)
123
- func.__dict__ = getattr(self, 'dict', {})
124
- func.__defaults__ = self.defaults
125
- func.__kwdefaults__ = self.kwonlydefaults or None
126
- func.__annotations__ = getattr(self, 'annotations', None)
127
- try:
128
- frame = sys._getframe(3)
129
- except AttributeError: # for IronPython and similar implementations
130
- callermodule = '?'
131
- else:
132
- callermodule = frame.f_globals.get('__name__', '?')
133
- func.__module__ = getattr(self, 'module', callermodule)
134
- func.__dict__.update(kw)
135
-
136
- def make(self, src_templ, evaldict=None, addsource=False, **attrs):
137
- """
138
- Make a new function from a given template and update the signature
139
- """
140
- src = src_templ % vars(self) # expand name and signature
141
- evaldict = evaldict or {}
142
- mo = DEF.search(src)
143
- if mo is None:
144
- raise SyntaxError('not a valid function template\n%s' % src)
145
- name = mo.group(1) # extract the function name
146
- names = set([name] + [arg.strip(' *') for arg in
147
- self.shortsignature.split(',')])
148
- for n in names:
149
- if n in ('_func_', '_call_'):
150
- raise NameError('%s is overridden in\n%s' % (n, src))
151
-
152
- if not src.endswith('\n'): # add a newline for old Pythons
153
- src += '\n'
154
-
155
- # Ensure each generated function has a unique filename for profilers
156
- # (such as cProfile) that depend on the tuple of (<filename>,
157
- # <definition line>, <function name>) being unique.
158
- filename = '<decorator-gen-%d>' % next(self._compile_count)
159
- try:
160
- code = compile(src, filename, 'single')
161
- exec(code, evaldict)
162
- except Exception:
163
- print('Error in generated code:', file=sys.stderr)
164
- print(src, file=sys.stderr)
165
- raise
166
- func = evaldict[name]
167
- if addsource:
168
- attrs['__source__'] = src
169
- self.update(func, **attrs)
170
- return func
171
-
172
- @classmethod
173
- def create(cls, obj, body, evaldict, defaults=None,
174
- doc=None, module=None, addsource=True, **attrs):
175
- """
176
- Create a function from the strings name, signature and body.
177
- evaldict is the evaluation dictionary. If addsource is true an
178
- attribute __source__ is added to the result. The attributes attrs
179
- are added, if any.
180
- """
181
- if isinstance(obj, str): # "name(signature)"
182
- name, rest = obj.strip().split('(', 1)
183
- signature = rest[:-1] # strip a right parens
184
- func = None
185
- else: # a function
186
- name = None
187
- signature = None
188
- func = obj
189
- self = cls(func, name, signature, defaults, doc, module)
190
- ibody = '\n'.join(' ' + line for line in body.splitlines())
191
- caller = evaldict.get('_call_') # when called from `decorate`
192
- if caller and iscoroutinefunction(caller):
193
- body = ('async def %(name)s(%(signature)s):\n' + ibody).replace(
194
- 'return', 'return await')
195
- else:
196
- body = 'def %(name)s(%(signature)s):\n' + ibody
197
- return self.make(body, evaldict, addsource, **attrs)
198
-
199
-
200
- def fix(args, kwargs, sig):
201
- """
202
- Fix args and kwargs to be consistent with the signature
203
- """
204
- ba = sig.bind(*args, **kwargs)
205
- ba.apply_defaults() # needed for test_dan_schult
206
- return ba.args, ba.kwargs
207
-
208
-
209
- def decorate(func, caller, extras=(), kwsyntax=False):
210
- """
211
- Decorates a function/generator/coroutine using a caller.
212
- If kwsyntax is True calling the decorated functions with keyword
213
- syntax will pass the named arguments inside the ``kw`` dictionary,
214
- even if such argument are positional, similarly to what functools.wraps
215
- does. By default kwsyntax is False and the the arguments are untouched.
216
- """
217
- sig = inspect.signature(func)
218
- if isinstance(func, functools.partial):
219
- func = functools.update_wrapper(func, func.func)
220
- if iscoroutinefunction(caller):
221
- async def fun(*args, **kw):
222
- if not kwsyntax:
223
- args, kw = fix(args, kw, sig)
224
- return await caller(func, *(extras + args), **kw)
225
- elif isgeneratorfunction(caller):
226
- def fun(*args, **kw):
227
- if not kwsyntax:
228
- args, kw = fix(args, kw, sig)
229
- for res in caller(func, *(extras + args), **kw):
230
- yield res
231
- else:
232
- def fun(*args, **kw):
233
- if not kwsyntax:
234
- args, kw = fix(args, kw, sig)
235
- return caller(func, *(extras + args), **kw)
236
-
237
- fun.__name__ = func.__name__
238
- fun.__doc__ = func.__doc__
239
- fun.__wrapped__ = func
240
- fun.__signature__ = sig
241
- fun.__qualname__ = func.__qualname__
242
- # builtin functions like defaultdict.__setitem__ lack many attributes
243
- try:
244
- fun.__defaults__ = func.__defaults__
245
- except AttributeError:
246
- pass
247
- try:
248
- fun.__kwdefaults__ = func.__kwdefaults__
249
- except AttributeError:
250
- pass
251
- try:
252
- fun.__annotations__ = func.__annotations__
253
- except AttributeError:
254
- pass
255
- try:
256
- fun.__module__ = func.__module__
257
- except AttributeError:
258
- pass
259
- try:
260
- fun.__name__ = func.__name__
261
- except AttributeError: # happens with old versions of numpy.vectorize
262
- func.__name__ == 'noname'
263
- try:
264
- fun.__dict__.update(func.__dict__)
265
- except AttributeError:
266
- pass
267
- return fun
268
-
269
-
270
- def decoratorx(caller):
271
- """
272
- A version of "decorator" implemented via "exec" and not via the
273
- Signature object. Use this if you are want to preserve the `.__code__`
274
- object properties (https://github.com/micheles/decorator/issues/129).
275
- """
276
- def dec(func):
277
- return FunctionMaker.create(
278
- func,
279
- "return _call_(_func_, %(shortsignature)s)",
280
- dict(_call_=caller, _func_=func),
281
- __wrapped__=func, __qualname__=func.__qualname__)
282
- return dec
283
-
284
-
285
- def decorator(caller, _func=None, kwsyntax=False):
286
- """
287
- decorator(caller) converts a caller function into a decorator
288
- """
289
- if _func is not None: # return a decorated function
290
- # this is obsolete behavior; you should use decorate instead
291
- return decorate(_func, caller, (), kwsyntax)
292
- # else return a decorator function
293
- sig = inspect.signature(caller)
294
- dec_params = [p for p in sig.parameters.values() if p.kind is POS]
295
-
296
- def dec(func=None, *args, **kw):
297
- na = len(args) + 1
298
- extras = args + tuple(kw.get(p.name, p.default)
299
- for p in dec_params[na:]
300
- if p.default is not EMPTY)
301
- if func is None:
302
- return lambda func: decorate(func, caller, extras, kwsyntax)
303
- else:
304
- return decorate(func, caller, extras, kwsyntax)
305
- dec.__signature__ = sig.replace(parameters=dec_params)
306
- dec.__name__ = caller.__name__
307
- dec.__doc__ = caller.__doc__
308
- dec.__wrapped__ = caller
309
- dec.__qualname__ = caller.__qualname__
310
- dec.__kwdefaults__ = getattr(caller, '__kwdefaults__', None)
311
- dec.__dict__.update(caller.__dict__)
312
- return dec
313
-
314
-
315
- # ####################### contextmanager ####################### #
316
-
317
-
318
- class ContextManager(_GeneratorContextManager):
319
- def __init__(self, g, *a, **k):
320
- _GeneratorContextManager.__init__(self, g, a, k)
321
-
322
- def __call__(self, func):
323
- def caller(f, *a, **k):
324
- with self.__class__(self.func, *self.args, **self.kwds):
325
- return f(*a, **k)
326
- return decorate(func, caller)
327
-
328
-
329
- _contextmanager = decorator(ContextManager)
330
-
331
-
332
- def contextmanager(func):
333
- # Enable Pylint config: contextmanager-decorators=decorator.contextmanager
334
- return _contextmanager(func)
335
-
336
-
337
- # ############################ dispatch_on ############################ #
338
-
339
- def append(a, vancestors):
340
- """
341
- Append ``a`` to the list of the virtual ancestors, unless it is already
342
- included.
343
- """
344
- add = True
345
- for j, va in enumerate(vancestors):
346
- if issubclass(va, a):
347
- add = False
348
- break
349
- if issubclass(a, va):
350
- vancestors[j] = a
351
- add = False
352
- if add:
353
- vancestors.append(a)
354
-
355
-
356
- # inspired from simplegeneric by P.J. Eby and functools.singledispatch
357
- def dispatch_on(*dispatch_args):
358
- """
359
- Factory of decorators turning a function into a generic function
360
- dispatching on the given arguments.
361
- """
362
- assert dispatch_args, 'No dispatch args passed'
363
- dispatch_str = '(%s,)' % ', '.join(dispatch_args)
364
-
365
- def check(arguments, wrong=operator.ne, msg=''):
366
- """Make sure one passes the expected number of arguments"""
367
- if wrong(len(arguments), len(dispatch_args)):
368
- raise TypeError('Expected %d arguments, got %d%s' %
369
- (len(dispatch_args), len(arguments), msg))
370
-
371
- def gen_func_dec(func):
372
- """Decorator turning a function into a generic function"""
373
-
374
- # first check the dispatch arguments
375
- argset = set(getfullargspec(func).args)
376
- if not set(dispatch_args) <= argset:
377
- raise NameError('Unknown dispatch arguments %s' % dispatch_str)
378
-
379
- typemap = {}
380
-
381
- def vancestors(*types):
382
- """
383
- Get a list of sets of virtual ancestors for the given types
384
- """
385
- check(types)
386
- ras = [[] for _ in range(len(dispatch_args))]
387
- for types_ in typemap:
388
- for t, type_, ra in zip(types, types_, ras):
389
- if issubclass(t, type_) and type_ not in t.mro():
390
- append(type_, ra)
391
- return [set(ra) for ra in ras]
392
-
393
- def ancestors(*types):
394
- """
395
- Get a list of virtual MROs, one for each type
396
- """
397
- check(types)
398
- lists = []
399
- for t, vas in zip(types, vancestors(*types)):
400
- n_vas = len(vas)
401
- if n_vas > 1:
402
- raise RuntimeError(
403
- 'Ambiguous dispatch for %s: %s' % (t, vas))
404
- elif n_vas == 1:
405
- va, = vas
406
- mro = type('t', (t, va), {}).mro()[1:]
407
- else:
408
- mro = t.mro()
409
- lists.append(mro[:-1]) # discard t and object
410
- return lists
411
-
412
- def register(*types):
413
- """
414
- Decorator to register an implementation for the given types
415
- """
416
- check(types)
417
-
418
- def dec(f):
419
- check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__)
420
- typemap[types] = f
421
- return f
422
- return dec
423
-
424
- def dispatch_info(*types):
425
- """
426
- A utility to introspect the dispatch algorithm
427
- """
428
- check(types)
429
- lst = []
430
- for ancs in itertools.product(*ancestors(*types)):
431
- lst.append(tuple(a.__name__ for a in ancs))
432
- return lst
433
-
434
- def _dispatch(dispatch_args, *args, **kw):
435
- types = tuple(type(arg) for arg in dispatch_args)
436
- try: # fast path
437
- f = typemap[types]
438
- except KeyError:
439
- pass
440
- else:
441
- return f(*args, **kw)
442
- combinations = itertools.product(*ancestors(*types))
443
- next(combinations) # the first one has been already tried
444
- for types_ in combinations:
445
- f = typemap.get(types_)
446
- if f is not None:
447
- return f(*args, **kw)
448
-
449
- # else call the default implementation
450
- return func(*args, **kw)
451
-
452
- return FunctionMaker.create(
453
- func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str,
454
- dict(_f_=_dispatch), register=register, default=func,
455
- typemap=typemap, vancestors=vancestors, ancestors=ancestors,
456
- dispatch_info=dispatch_info, __wrapped__=func)
457
-
458
- gen_func_dec.__name__ = 'dispatch_on' + dispatch_str
459
- return gen_func_dec
 
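For context on what the deleted decorator.py provided: decorator() and dispatch_on() above are the module's public entry points. Below is a minimal usage sketch, assuming the file is the standard PyPI decorator package; the trace/describe names are hypothetical examples, not code from this repository.

from decorator import decorator, dispatch_on

@decorator
def trace(func, *args, **kw):
    # the caller runs around every call of the decorated function, while
    # decorator() preserves the wrapped function's signature and metadata
    print(f"calling {func.__name__} with {args} {kw}")
    return func(*args, **kw)

@trace
def add(x, y):
    return x + y

add(1, 2)  # prints the trace line, then returns 3

@dispatch_on('obj')
def describe(obj):
    return "generic object"

@describe.register(int)
def describe_int(obj):
    return "an integer"

describe(42)   # -> "an integer" (exact type match in the typemap)
describe("x")  # -> "generic object" (falls through to the default)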
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/distutils-precedence.pth DELETED
@@ -1 +0,0 @@
1
- import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim();
 
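CPython's site machinery executes .pth lines that begin with "import" at interpreter startup, so the deleted one-liner above ran on every Python launch in that virtualenv. A readable sketch of the same logic (not code from the repository, assuming _distutils_hack is the module bundled with setuptools):

import os

# setuptools only installs its distutils shim when the user opts in
var = 'SETUPTOOLS_USE_DISTUTILS'
enabled = os.environ.get(var, 'stdlib') == 'local'
if enabled:
    # add_shim() makes "import distutils" resolve to setuptools'
    # bundled copy instead of the standard-library version
    import _distutils_hack
    _distutils_hack.add_shim()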
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__init__.py DELETED
@@ -1,105 +0,0 @@
1
- from .__version__ import __description__, __title__, __version__
2
- from ._api import *
3
- from ._auth import *
4
- from ._client import *
5
- from ._config import *
6
- from ._content import *
7
- from ._exceptions import *
8
- from ._models import *
9
- from ._status_codes import *
10
- from ._transports import *
11
- from ._types import *
12
- from ._urls import *
13
-
14
- try:
15
- from ._main import main
16
- except ImportError: # pragma: no cover
17
-
18
- def main() -> None: # type: ignore
19
- import sys
20
-
21
- print(
22
- "The httpx command line client could not run because the required "
23
- "dependencies were not installed.\nMake sure you've installed "
24
- "everything with: pip install 'httpx[cli]'"
25
- )
26
- sys.exit(1)
27
-
28
-
29
- __all__ = [
30
- "__description__",
31
- "__title__",
32
- "__version__",
33
- "ASGITransport",
34
- "AsyncBaseTransport",
35
- "AsyncByteStream",
36
- "AsyncClient",
37
- "AsyncHTTPTransport",
38
- "Auth",
39
- "BaseTransport",
40
- "BasicAuth",
41
- "ByteStream",
42
- "Client",
43
- "CloseError",
44
- "codes",
45
- "ConnectError",
46
- "ConnectTimeout",
47
- "CookieConflict",
48
- "Cookies",
49
- "create_ssl_context",
50
- "DecodingError",
51
- "delete",
52
- "DigestAuth",
53
- "get",
54
- "head",
55
- "Headers",
56
- "HTTPError",
57
- "HTTPStatusError",
58
- "HTTPTransport",
59
- "InvalidURL",
60
- "Limits",
61
- "LocalProtocolError",
62
- "main",
63
- "MockTransport",
64
- "NetRCAuth",
65
- "NetworkError",
66
- "options",
67
- "patch",
68
- "PoolTimeout",
69
- "post",
70
- "ProtocolError",
71
- "Proxy",
72
- "ProxyError",
73
- "put",
74
- "QueryParams",
75
- "ReadError",
76
- "ReadTimeout",
77
- "RemoteProtocolError",
78
- "request",
79
- "Request",
80
- "RequestError",
81
- "RequestNotRead",
82
- "Response",
83
- "ResponseNotRead",
84
- "stream",
85
- "StreamClosed",
86
- "StreamConsumed",
87
- "StreamError",
88
- "SyncByteStream",
89
- "Timeout",
90
- "TimeoutException",
91
- "TooManyRedirects",
92
- "TransportError",
93
- "UnsupportedProtocol",
94
- "URL",
95
- "USE_CLIENT_DEFAULT",
96
- "WriteError",
97
- "WriteTimeout",
98
- "WSGITransport",
99
- ]
100
-
101
-
102
- __locals = locals()
103
- for __name in __all__:
104
- if not __name.startswith("__"):
105
- setattr(__locals[__name], "__module__", "httpx") # noqa
 
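The loop at the end of the deleted __init__.py rewrites each re-exported object's __module__ so it presents as part of the public httpx namespace rather than a private submodule. A minimal sketch of the same re-export pattern, with hypothetical mypkg/_impl names:

# mypkg/__init__.py
from ._impl import Widget  # re-export from a private submodule

__all__ = ["Widget"]

__locals = locals()
for __name in __all__:
    if not __name.startswith("__"):
        # Widget.__module__ becomes "mypkg" instead of "mypkg._impl",
        # so reprs and introspection point at the public name
        setattr(__locals[__name], "__module__", "mypkg")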
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc DELETED
Binary file (1.92 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc DELETED
Binary file (321 Bytes)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc DELETED
Binary file (7.7 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc DELETED
Binary file (10.6 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc DELETED
Binary file (41.5 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc DELETED
Binary file (7.14 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc DELETED
Binary file (7.35 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc DELETED
Binary file (10.9 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc DELETED
Binary file (10.4 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc DELETED
Binary file (12.5 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc DELETED
Binary file (39.2 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc DELETED
Binary file (9.01 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc DELETED
Binary file (6.35 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc DELETED
Binary file (2.89 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc DELETED
Binary file (11.1 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc DELETED
Binary file (21.9 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc DELETED
Binary file (6.75 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/__version__.py DELETED
@@ -1,3 +0,0 @@
1
- __title__ = "httpx"
2
- __description__ = "A next generation HTTP client, for Python 3."
3
- __version__ = "0.28.1"
 
 
 
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_api.py DELETED
@@ -1,438 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import typing
4
- from contextlib import contextmanager
5
-
6
- from ._client import Client
7
- from ._config import DEFAULT_TIMEOUT_CONFIG
8
- from ._models import Response
9
- from ._types import (
10
- AuthTypes,
11
- CookieTypes,
12
- HeaderTypes,
13
- ProxyTypes,
14
- QueryParamTypes,
15
- RequestContent,
16
- RequestData,
17
- RequestFiles,
18
- TimeoutTypes,
19
- )
20
- from ._urls import URL
21
-
22
- if typing.TYPE_CHECKING:
23
- import ssl # pragma: no cover
24
-
25
-
26
- __all__ = [
27
- "delete",
28
- "get",
29
- "head",
30
- "options",
31
- "patch",
32
- "post",
33
- "put",
34
- "request",
35
- "stream",
36
- ]
37
-
38
-
39
- def request(
40
- method: str,
41
- url: URL | str,
42
- *,
43
- params: QueryParamTypes | None = None,
44
- content: RequestContent | None = None,
45
- data: RequestData | None = None,
46
- files: RequestFiles | None = None,
47
- json: typing.Any | None = None,
48
- headers: HeaderTypes | None = None,
49
- cookies: CookieTypes | None = None,
50
- auth: AuthTypes | None = None,
51
- proxy: ProxyTypes | None = None,
52
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
53
- follow_redirects: bool = False,
54
- verify: ssl.SSLContext | str | bool = True,
55
- trust_env: bool = True,
56
- ) -> Response:
57
- """
58
- Sends an HTTP request.
59
-
60
- **Parameters:**
61
-
62
- * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`,
63
- `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`.
64
- * **url** - URL for the new `Request` object.
65
- * **params** - *(optional)* Query parameters to include in the URL, as a
66
- string, dictionary, or sequence of two-tuples.
67
- * **content** - *(optional)* Binary content to include in the body of the
68
- request, as bytes or a byte iterator.
69
- * **data** - *(optional)* Form data to include in the body of the request,
70
- as a dictionary.
71
- * **files** - *(optional)* A dictionary of upload files to include in the
72
- body of the request.
73
- * **json** - *(optional)* A JSON serializable object to include in the body
74
- of the request.
75
- * **headers** - *(optional)* Dictionary of HTTP headers to include in the
76
- request.
77
- * **cookies** - *(optional)* Dictionary of Cookie items to include in the
78
- request.
79
- * **auth** - *(optional)* An authentication class to use when sending the
80
- request.
81
- * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
82
- * **timeout** - *(optional)* The timeout configuration to use when sending
83
- the request.
84
- * **follow_redirects** - *(optional)* Enables or disables HTTP redirects.
85
- * **verify** - *(optional)* Either `True` to use an SSL context with the
86
- default CA bundle, `False` to disable verification, or an instance of
87
- `ssl.SSLContext` to use a custom context.
88
- * **trust_env** - *(optional)* Enables or disables usage of environment
89
- variables for configuration.
90
-
91
- **Returns:** `Response`
92
-
93
- Usage:
94
-
95
- ```
96
- >>> import httpx
97
- >>> response = httpx.request('GET', 'https://httpbin.org/get')
98
- >>> response
99
- <Response [200 OK]>
100
- ```
101
- """
102
- with Client(
103
- cookies=cookies,
104
- proxy=proxy,
105
- verify=verify,
106
- timeout=timeout,
107
- trust_env=trust_env,
108
- ) as client:
109
- return client.request(
110
- method=method,
111
- url=url,
112
- content=content,
113
- data=data,
114
- files=files,
115
- json=json,
116
- params=params,
117
- headers=headers,
118
- auth=auth,
119
- follow_redirects=follow_redirects,
120
- )
121
-
122
-
123
- @contextmanager
124
- def stream(
125
- method: str,
126
- url: URL | str,
127
- *,
128
- params: QueryParamTypes | None = None,
129
- content: RequestContent | None = None,
130
- data: RequestData | None = None,
131
- files: RequestFiles | None = None,
132
- json: typing.Any | None = None,
133
- headers: HeaderTypes | None = None,
134
- cookies: CookieTypes | None = None,
135
- auth: AuthTypes | None = None,
136
- proxy: ProxyTypes | None = None,
137
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
138
- follow_redirects: bool = False,
139
- verify: ssl.SSLContext | str | bool = True,
140
- trust_env: bool = True,
141
- ) -> typing.Iterator[Response]:
142
- """
143
- Alternative to `httpx.request()` that streams the response body
144
- instead of loading it into memory at once.
145
-
146
- **Parameters**: See `httpx.request`.
147
-
148
- See also: [Streaming Responses][0]
149
-
150
- [0]: /quickstart#streaming-responses
151
- """
152
- with Client(
153
- cookies=cookies,
154
- proxy=proxy,
155
- verify=verify,
156
- timeout=timeout,
157
- trust_env=trust_env,
158
- ) as client:
159
- with client.stream(
160
- method=method,
161
- url=url,
162
- content=content,
163
- data=data,
164
- files=files,
165
- json=json,
166
- params=params,
167
- headers=headers,
168
- auth=auth,
169
- follow_redirects=follow_redirects,
170
- ) as response:
171
- yield response
172
-
173
-
174
- def get(
175
- url: URL | str,
176
- *,
177
- params: QueryParamTypes | None = None,
178
- headers: HeaderTypes | None = None,
179
- cookies: CookieTypes | None = None,
180
- auth: AuthTypes | None = None,
181
- proxy: ProxyTypes | None = None,
182
- follow_redirects: bool = False,
183
- verify: ssl.SSLContext | str | bool = True,
184
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
185
- trust_env: bool = True,
186
- ) -> Response:
187
- """
188
- Sends a `GET` request.
189
-
190
- **Parameters**: See `httpx.request`.
191
-
192
- Note that the `data`, `files`, `json` and `content` parameters are not available
193
- on this function, as `GET` requests should not include a request body.
194
- """
195
- return request(
196
- "GET",
197
- url,
198
- params=params,
199
- headers=headers,
200
- cookies=cookies,
201
- auth=auth,
202
- proxy=proxy,
203
- follow_redirects=follow_redirects,
204
- verify=verify,
205
- timeout=timeout,
206
- trust_env=trust_env,
207
- )
208
-
209
-
210
- def options(
211
- url: URL | str,
212
- *,
213
- params: QueryParamTypes | None = None,
214
- headers: HeaderTypes | None = None,
215
- cookies: CookieTypes | None = None,
216
- auth: AuthTypes | None = None,
217
- proxy: ProxyTypes | None = None,
218
- follow_redirects: bool = False,
219
- verify: ssl.SSLContext | str | bool = True,
220
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
221
- trust_env: bool = True,
222
- ) -> Response:
223
- """
224
- Sends an `OPTIONS` request.
225
-
226
- **Parameters**: See `httpx.request`.
227
-
228
- Note that the `data`, `files`, `json` and `content` parameters are not available
229
- on this function, as `OPTIONS` requests should not include a request body.
230
- """
231
- return request(
232
- "OPTIONS",
233
- url,
234
- params=params,
235
- headers=headers,
236
- cookies=cookies,
237
- auth=auth,
238
- proxy=proxy,
239
- follow_redirects=follow_redirects,
240
- verify=verify,
241
- timeout=timeout,
242
- trust_env=trust_env,
243
- )
244
-
245
-
246
- def head(
247
- url: URL | str,
248
- *,
249
- params: QueryParamTypes | None = None,
250
- headers: HeaderTypes | None = None,
251
- cookies: CookieTypes | None = None,
252
- auth: AuthTypes | None = None,
253
- proxy: ProxyTypes | None = None,
254
- follow_redirects: bool = False,
255
- verify: ssl.SSLContext | str | bool = True,
256
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
257
- trust_env: bool = True,
258
- ) -> Response:
259
- """
260
- Sends a `HEAD` request.
261
-
262
- **Parameters**: See `httpx.request`.
263
-
264
- Note that the `data`, `files`, `json` and `content` parameters are not available
265
- on this function, as `HEAD` requests should not include a request body.
266
- """
267
- return request(
268
- "HEAD",
269
- url,
270
- params=params,
271
- headers=headers,
272
- cookies=cookies,
273
- auth=auth,
274
- proxy=proxy,
275
- follow_redirects=follow_redirects,
276
- verify=verify,
277
- timeout=timeout,
278
- trust_env=trust_env,
279
- )
280
-
281
-
282
- def post(
283
- url: URL | str,
284
- *,
285
- content: RequestContent | None = None,
286
- data: RequestData | None = None,
287
- files: RequestFiles | None = None,
288
- json: typing.Any | None = None,
289
- params: QueryParamTypes | None = None,
290
- headers: HeaderTypes | None = None,
291
- cookies: CookieTypes | None = None,
292
- auth: AuthTypes | None = None,
293
- proxy: ProxyTypes | None = None,
294
- follow_redirects: bool = False,
295
- verify: ssl.SSLContext | str | bool = True,
296
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
297
- trust_env: bool = True,
298
- ) -> Response:
299
- """
300
- Sends a `POST` request.
301
-
302
- **Parameters**: See `httpx.request`.
303
- """
304
- return request(
305
- "POST",
306
- url,
307
- content=content,
308
- data=data,
309
- files=files,
310
- json=json,
311
- params=params,
312
- headers=headers,
313
- cookies=cookies,
314
- auth=auth,
315
- proxy=proxy,
316
- follow_redirects=follow_redirects,
317
- verify=verify,
318
- timeout=timeout,
319
- trust_env=trust_env,
320
- )
321
-
322
-
323
- def put(
324
- url: URL | str,
325
- *,
326
- content: RequestContent | None = None,
327
- data: RequestData | None = None,
328
- files: RequestFiles | None = None,
329
- json: typing.Any | None = None,
330
- params: QueryParamTypes | None = None,
331
- headers: HeaderTypes | None = None,
332
- cookies: CookieTypes | None = None,
333
- auth: AuthTypes | None = None,
334
- proxy: ProxyTypes | None = None,
335
- follow_redirects: bool = False,
336
- verify: ssl.SSLContext | str | bool = True,
337
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
338
- trust_env: bool = True,
339
- ) -> Response:
340
- """
341
- Sends a `PUT` request.
342
-
343
- **Parameters**: See `httpx.request`.
344
- """
345
- return request(
346
- "PUT",
347
- url,
348
- content=content,
349
- data=data,
350
- files=files,
351
- json=json,
352
- params=params,
353
- headers=headers,
354
- cookies=cookies,
355
- auth=auth,
356
- proxy=proxy,
357
- follow_redirects=follow_redirects,
358
- verify=verify,
359
- timeout=timeout,
360
- trust_env=trust_env,
361
- )
362
-
363
-
364
- def patch(
365
- url: URL | str,
366
- *,
367
- content: RequestContent | None = None,
368
- data: RequestData | None = None,
369
- files: RequestFiles | None = None,
370
- json: typing.Any | None = None,
371
- params: QueryParamTypes | None = None,
372
- headers: HeaderTypes | None = None,
373
- cookies: CookieTypes | None = None,
374
- auth: AuthTypes | None = None,
375
- proxy: ProxyTypes | None = None,
376
- follow_redirects: bool = False,
377
- verify: ssl.SSLContext | str | bool = True,
378
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
379
- trust_env: bool = True,
380
- ) -> Response:
381
- """
382
- Sends a `PATCH` request.
383
-
384
- **Parameters**: See `httpx.request`.
385
- """
386
- return request(
387
- "PATCH",
388
- url,
389
- content=content,
390
- data=data,
391
- files=files,
392
- json=json,
393
- params=params,
394
- headers=headers,
395
- cookies=cookies,
396
- auth=auth,
397
- proxy=proxy,
398
- follow_redirects=follow_redirects,
399
- verify=verify,
400
- timeout=timeout,
401
- trust_env=trust_env,
402
- )
403
-
404
-
405
- def delete(
406
- url: URL | str,
407
- *,
408
- params: QueryParamTypes | None = None,
409
- headers: HeaderTypes | None = None,
410
- cookies: CookieTypes | None = None,
411
- auth: AuthTypes | None = None,
412
- proxy: ProxyTypes | None = None,
413
- follow_redirects: bool = False,
414
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
415
- verify: ssl.SSLContext | str | bool = True,
416
- trust_env: bool = True,
417
- ) -> Response:
418
- """
419
- Sends a `DELETE` request.
420
-
421
- **Parameters**: See `httpx.request`.
422
-
423
- Note that the `data`, `files`, `json` and `content` parameters are not available
424
- on this function, as `DELETE` requests should not include a request body.
425
- """
426
- return request(
427
- "DELETE",
428
- url,
429
- params=params,
430
- headers=headers,
431
- cookies=cookies,
432
- auth=auth,
433
- proxy=proxy,
434
- follow_redirects=follow_redirects,
435
- verify=verify,
436
- timeout=timeout,
437
- trust_env=trust_env,
438
- )
 
 
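Per the docstrings in the deleted _api.py, each top-level helper opens a short-lived Client around a single call. A minimal usage sketch of that documented API (httpbin.org is simply the example host the docstrings use):

import httpx

# one-shot request: a Client is created and closed around this call
response = httpx.get("https://httpbin.org/get", params={"q": "search"})
print(response.status_code)

# stream() yields the Response before the body is read into memory
with httpx.stream("GET", "https://httpbin.org/get") as response:
    for chunk in response.iter_bytes():
        pass  # process the body incrementally

For repeated requests, the Client class from _client.py (deleted further below) is the documented alternative, since it reuses connections across calls.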
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_auth.py DELETED
@@ -1,348 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import hashlib
4
- import os
5
- import re
6
- import time
7
- import typing
8
- from base64 import b64encode
9
- from urllib.request import parse_http_list
10
-
11
- from ._exceptions import ProtocolError
12
- from ._models import Cookies, Request, Response
13
- from ._utils import to_bytes, to_str, unquote
14
-
15
- if typing.TYPE_CHECKING: # pragma: no cover
16
- from hashlib import _Hash
17
-
18
-
19
- __all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"]
20
-
21
-
22
- class Auth:
23
- """
24
- Base class for all authentication schemes.
25
-
26
- To implement a custom authentication scheme, subclass `Auth` and override
27
- the `.auth_flow()` method.
28
-
29
- If the authentication scheme does I/O such as disk access or network calls, or uses
30
- synchronization primitives such as locks, you should override `.sync_auth_flow()`
31
- and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized
32
- implementations that will be used by `Client` and `AsyncClient` respectively.
33
- """
34
-
35
- requires_request_body = False
36
- requires_response_body = False
37
-
38
- def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
39
- """
40
- Execute the authentication flow.
41
-
42
- To dispatch a request, `yield` it:
43
-
44
- ```
45
- yield request
46
- ```
47
-
48
- The client will `.send()` the response back into the flow generator. You can
49
- access it like so:
50
-
51
- ```
52
- response = yield request
53
- ```
54
-
55
- A `return` (or reaching the end of the generator) will result in the
56
- client returning the last response obtained from the server.
57
-
58
- You can dispatch as many requests as is necessary.
59
- """
60
- yield request
61
-
62
- def sync_auth_flow(
63
- self, request: Request
64
- ) -> typing.Generator[Request, Response, None]:
65
- """
66
- Execute the authentication flow synchronously.
67
-
68
- By default, this defers to `.auth_flow()`. You should override this method
69
- when the authentication scheme does I/O and/or uses concurrency primitives.
70
- """
71
- if self.requires_request_body:
72
- request.read()
73
-
74
- flow = self.auth_flow(request)
75
- request = next(flow)
76
-
77
- while True:
78
- response = yield request
79
- if self.requires_response_body:
80
- response.read()
81
-
82
- try:
83
- request = flow.send(response)
84
- except StopIteration:
85
- break
86
-
87
- async def async_auth_flow(
88
- self, request: Request
89
- ) -> typing.AsyncGenerator[Request, Response]:
90
- """
91
- Execute the authentication flow asynchronously.
92
-
93
- By default, this defers to `.auth_flow()`. You should override this method
94
- when the authentication scheme does I/O and/or uses concurrency primitives.
95
- """
96
- if self.requires_request_body:
97
- await request.aread()
98
-
99
- flow = self.auth_flow(request)
100
- request = next(flow)
101
-
102
- while True:
103
- response = yield request
104
- if self.requires_response_body:
105
- await response.aread()
106
-
107
- try:
108
- request = flow.send(response)
109
- except StopIteration:
110
- break
111
-
112
-
113
- class FunctionAuth(Auth):
114
- """
115
- Allows the 'auth' argument to be passed as a simple callable function,
116
- that takes the request, and returns a new, modified request.
117
- """
118
-
119
- def __init__(self, func: typing.Callable[[Request], Request]) -> None:
120
- self._func = func
121
-
122
- def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
123
- yield self._func(request)
124
-
125
-
126
- class BasicAuth(Auth):
127
- """
128
- Allows the 'auth' argument to be passed as a (username, password) pair,
129
- and uses HTTP Basic authentication.
130
- """
131
-
132
- def __init__(self, username: str | bytes, password: str | bytes) -> None:
133
- self._auth_header = self._build_auth_header(username, password)
134
-
135
- def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
136
- request.headers["Authorization"] = self._auth_header
137
- yield request
138
-
139
- def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str:
140
- userpass = b":".join((to_bytes(username), to_bytes(password)))
141
- token = b64encode(userpass).decode()
142
- return f"Basic {token}"
143
-
144
-
145
- class NetRCAuth(Auth):
146
- """
147
- Use a 'netrc' file to lookup basic auth credentials based on the url host.
148
- """
149
-
150
- def __init__(self, file: str | None = None) -> None:
151
- # Lazily import 'netrc'.
152
- # There's no need for us to load this module unless 'NetRCAuth' is being used.
153
- import netrc
154
-
155
- self._netrc_info = netrc.netrc(file)
156
-
157
- def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
158
- auth_info = self._netrc_info.authenticators(request.url.host)
159
- if auth_info is None or not auth_info[2]:
160
- # The netrc file did not have authentication credentials for this host.
161
- yield request
162
- else:
163
- # Build a basic auth header with credentials from the netrc file.
164
- request.headers["Authorization"] = self._build_auth_header(
165
- username=auth_info[0], password=auth_info[2]
166
- )
167
- yield request
168
-
169
- def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str:
170
- userpass = b":".join((to_bytes(username), to_bytes(password)))
171
- token = b64encode(userpass).decode()
172
- return f"Basic {token}"
173
-
174
-
175
- class DigestAuth(Auth):
176
- _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = {
177
- "MD5": hashlib.md5,
178
- "MD5-SESS": hashlib.md5,
179
- "SHA": hashlib.sha1,
180
- "SHA-SESS": hashlib.sha1,
181
- "SHA-256": hashlib.sha256,
182
- "SHA-256-SESS": hashlib.sha256,
183
- "SHA-512": hashlib.sha512,
184
- "SHA-512-SESS": hashlib.sha512,
185
- }
186
-
187
- def __init__(self, username: str | bytes, password: str | bytes) -> None:
188
- self._username = to_bytes(username)
189
- self._password = to_bytes(password)
190
- self._last_challenge: _DigestAuthChallenge | None = None
191
- self._nonce_count = 1
192
-
193
- def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
194
- if self._last_challenge:
195
- request.headers["Authorization"] = self._build_auth_header(
196
- request, self._last_challenge
197
- )
198
-
199
- response = yield request
200
-
201
- if response.status_code != 401 or "www-authenticate" not in response.headers:
202
- # If the response is not a 401 then we don't
203
- # need to build an authenticated request.
204
- return
205
-
206
- for auth_header in response.headers.get_list("www-authenticate"):
207
- if auth_header.lower().startswith("digest "):
208
- break
209
- else:
210
- # If the response does not include a 'WWW-Authenticate: Digest ...'
211
- # header, then we don't need to build an authenticated request.
212
- return
213
-
214
- self._last_challenge = self._parse_challenge(request, response, auth_header)
215
- self._nonce_count = 1
216
-
217
- request.headers["Authorization"] = self._build_auth_header(
218
- request, self._last_challenge
219
- )
220
- if response.cookies:
221
- Cookies(response.cookies).set_cookie_header(request=request)
222
- yield request
223
-
224
- def _parse_challenge(
225
- self, request: Request, response: Response, auth_header: str
226
- ) -> _DigestAuthChallenge:
227
- """
228
- Returns a challenge from a Digest WWW-Authenticate header.
229
- These take the form of:
230
- `Digest realm="[email protected]",qop="auth,auth-int",nonce="abc",opaque="xyz"`
231
- """
232
- scheme, _, fields = auth_header.partition(" ")
233
-
234
- # This method should only ever have been called with a Digest auth header.
235
- assert scheme.lower() == "digest"
236
-
237
- header_dict: dict[str, str] = {}
238
- for field in parse_http_list(fields):
239
- key, value = field.strip().split("=", 1)
240
- header_dict[key] = unquote(value)
241
-
242
- try:
243
- realm = header_dict["realm"].encode()
244
- nonce = header_dict["nonce"].encode()
245
- algorithm = header_dict.get("algorithm", "MD5")
246
- opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None
247
- qop = header_dict["qop"].encode() if "qop" in header_dict else None
248
- return _DigestAuthChallenge(
249
- realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop
250
- )
251
- except KeyError as exc:
252
- message = "Malformed Digest WWW-Authenticate header"
253
- raise ProtocolError(message, request=request) from exc
254
-
255
- def _build_auth_header(
256
- self, request: Request, challenge: _DigestAuthChallenge
257
- ) -> str:
258
- hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()]
259
-
260
- def digest(data: bytes) -> bytes:
261
- return hash_func(data).hexdigest().encode()
262
-
263
- A1 = b":".join((self._username, challenge.realm, self._password))
264
-
265
- path = request.url.raw_path
266
- A2 = b":".join((request.method.encode(), path))
267
- # TODO: implement auth-int
268
- HA2 = digest(A2)
269
-
270
- nc_value = b"%08x" % self._nonce_count
271
- cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce)
272
- self._nonce_count += 1
273
-
274
- HA1 = digest(A1)
275
- if challenge.algorithm.lower().endswith("-sess"):
276
- HA1 = digest(b":".join((HA1, challenge.nonce, cnonce)))
277
-
278
- qop = self._resolve_qop(challenge.qop, request=request)
279
- if qop is None:
280
- # Following RFC 2069
281
- digest_data = [HA1, challenge.nonce, HA2]
282
- else:
283
- # Following RFC 2617/7616
284
- digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2]
285
-
286
- format_args = {
287
- "username": self._username,
288
- "realm": challenge.realm,
289
- "nonce": challenge.nonce,
290
- "uri": path,
291
- "response": digest(b":".join(digest_data)),
292
- "algorithm": challenge.algorithm.encode(),
293
- }
294
- if challenge.opaque:
295
- format_args["opaque"] = challenge.opaque
296
- if qop:
297
- format_args["qop"] = b"auth"
298
- format_args["nc"] = nc_value
299
- format_args["cnonce"] = cnonce
300
-
301
- return "Digest " + self._get_header_value(format_args)
302
-
303
- def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes:
304
- s = str(nonce_count).encode()
305
- s += nonce
306
- s += time.ctime().encode()
307
- s += os.urandom(8)
308
-
309
- return hashlib.sha1(s).hexdigest()[:16].encode()
310
-
311
- def _get_header_value(self, header_fields: dict[str, bytes]) -> str:
312
- NON_QUOTED_FIELDS = ("algorithm", "qop", "nc")
313
- QUOTED_TEMPLATE = '{}="{}"'
314
- NON_QUOTED_TEMPLATE = "{}={}"
315
-
316
- header_value = ""
317
- for i, (field, value) in enumerate(header_fields.items()):
318
- if i > 0:
319
- header_value += ", "
320
- template = (
321
- QUOTED_TEMPLATE
322
- if field not in NON_QUOTED_FIELDS
323
- else NON_QUOTED_TEMPLATE
324
- )
325
- header_value += template.format(field, to_str(value))
326
-
327
- return header_value
328
-
329
- def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None:
330
- if qop is None:
331
- return None
332
- qops = re.split(b", ?", qop)
333
- if b"auth" in qops:
334
- return b"auth"
335
-
336
- if qops == [b"auth-int"]:
337
- raise NotImplementedError("Digest auth-int support is not yet implemented")
338
-
339
- message = f'Unexpected qop value "{qop!r}" in digest auth'
340
- raise ProtocolError(message, request=request)
341
-
342
-
343
- class _DigestAuthChallenge(typing.NamedTuple):
344
- realm: bytes
345
- nonce: bytes
346
- algorithm: str
347
- opaque: bytes | None
348
- qop: bytes | None
 
 
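The Auth.auth_flow() generator protocol documented in the deleted _auth.py is the extension point for custom schemes. A minimal sketch of a custom scheme built on it; TokenAuth and the token value are hypothetical:

import httpx

class TokenAuth(httpx.Auth):
    """Attach a static bearer token to every outgoing request."""

    def __init__(self, token: str) -> None:
        self._token = token

    def auth_flow(self, request):
        # single-step flow: modify the request, yield it once, then stop;
        # multi-step schemes (like DigestAuth above) can inspect the
        # response that the client sends back into the generator
        request.headers["Authorization"] = f"Bearer {self._token}"
        yield request

client = httpx.Client(auth=TokenAuth("example-token"))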
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_client.py DELETED
@@ -1,2019 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import datetime
4
- import enum
5
- import logging
6
- import time
7
- import typing
8
- import warnings
9
- from contextlib import asynccontextmanager, contextmanager
10
- from types import TracebackType
11
-
12
- from .__version__ import __version__
13
- from ._auth import Auth, BasicAuth, FunctionAuth
14
- from ._config import (
15
- DEFAULT_LIMITS,
16
- DEFAULT_MAX_REDIRECTS,
17
- DEFAULT_TIMEOUT_CONFIG,
18
- Limits,
19
- Proxy,
20
- Timeout,
21
- )
22
- from ._decoders import SUPPORTED_DECODERS
23
- from ._exceptions import (
24
- InvalidURL,
25
- RemoteProtocolError,
26
- TooManyRedirects,
27
- request_context,
28
- )
29
- from ._models import Cookies, Headers, Request, Response
30
- from ._status_codes import codes
31
- from ._transports.base import AsyncBaseTransport, BaseTransport
32
- from ._transports.default import AsyncHTTPTransport, HTTPTransport
33
- from ._types import (
34
- AsyncByteStream,
35
- AuthTypes,
36
- CertTypes,
37
- CookieTypes,
38
- HeaderTypes,
39
- ProxyTypes,
40
- QueryParamTypes,
41
- RequestContent,
42
- RequestData,
43
- RequestExtensions,
44
- RequestFiles,
45
- SyncByteStream,
46
- TimeoutTypes,
47
- )
48
- from ._urls import URL, QueryParams
49
- from ._utils import URLPattern, get_environment_proxies
50
-
51
- if typing.TYPE_CHECKING:
52
- import ssl # pragma: no cover
53
-
54
- __all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"]
55
-
56
- # The type annotation for @classmethod and context managers here follows PEP 484
57
- # https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods
58
- T = typing.TypeVar("T", bound="Client")
59
- U = typing.TypeVar("U", bound="AsyncClient")
60
-
61
-
62
- def _is_https_redirect(url: URL, location: URL) -> bool:
63
- """
64
- Return 'True' if 'location' is a HTTPS upgrade of 'url'
65
- """
66
- if url.host != location.host:
67
- return False
68
-
69
- return (
70
- url.scheme == "http"
71
- and _port_or_default(url) == 80
72
- and location.scheme == "https"
73
- and _port_or_default(location) == 443
74
- )
75
-
76
-
77
- def _port_or_default(url: URL) -> int | None:
78
- if url.port is not None:
79
- return url.port
80
- return {"http": 80, "https": 443}.get(url.scheme)
81
-
82
-
83
- def _same_origin(url: URL, other: URL) -> bool:
84
- """
85
- Return 'True' if the given URLs share the same origin.
86
- """
87
- return (
88
- url.scheme == other.scheme
89
- and url.host == other.host
90
- and _port_or_default(url) == _port_or_default(other)
91
- )
92
-
93
-
94
- class UseClientDefault:
95
- """
96
- For some parameters such as `auth=...` and `timeout=...` we need to be able
97
- to indicate the default "unset" state, in a way that is distinctly different
98
- to using `None`.
99
-
100
- The default "unset" state indicates that whatever default is set on the
101
- client should be used. This is different to setting `None`, which
102
- explicitly disables the parameter, possibly overriding a client default.
103
-
104
- For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature.
105
- Omitting the `timeout` parameter will send a request using whatever default
106
- timeout has been configured on the client. Including `timeout=None` will
107
- ensure no timeout is used.
108
-
109
- Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant,
110
- but it is used internally when a parameter is not included.
111
- """
112
-
113
-
114
- USE_CLIENT_DEFAULT = UseClientDefault()
115
-
116
-
117
- logger = logging.getLogger("httpx")
118
-
119
- USER_AGENT = f"python-httpx/{__version__}"
120
- ACCEPT_ENCODING = ", ".join(
121
- [key for key in SUPPORTED_DECODERS.keys() if key != "identity"]
122
- )
123
-
124
-
125
- class ClientState(enum.Enum):
126
- # UNOPENED:
127
- # The client has been instantiated, but has not been used to send a request,
128
- # or been opened by entering the context of a `with` block.
129
- UNOPENED = 1
130
- # OPENED:
131
- # The client has either sent a request, or is within a `with` block.
132
- OPENED = 2
133
- # CLOSED:
134
- # The client has either exited the `with` block, or `close()` has
135
- # been called explicitly.
136
- CLOSED = 3
137
-
138
-
139
- class BoundSyncStream(SyncByteStream):
140
- """
141
- A byte stream that is bound to a given response instance, and that
142
- ensures the `response.elapsed` is set once the response is closed.
143
- """
144
-
145
- def __init__(
146
- self, stream: SyncByteStream, response: Response, start: float
147
- ) -> None:
148
- self._stream = stream
149
- self._response = response
150
- self._start = start
151
-
152
- def __iter__(self) -> typing.Iterator[bytes]:
153
- for chunk in self._stream:
154
- yield chunk
155
-
156
- def close(self) -> None:
157
- elapsed = time.perf_counter() - self._start
158
- self._response.elapsed = datetime.timedelta(seconds=elapsed)
159
- self._stream.close()
160
-
161
-
162
- class BoundAsyncStream(AsyncByteStream):
163
- """
164
- An async byte stream that is bound to a given response instance, and that
165
- ensures the `response.elapsed` is set once the response is closed.
166
- """
167
-
168
- def __init__(
169
- self, stream: AsyncByteStream, response: Response, start: float
170
- ) -> None:
171
- self._stream = stream
172
- self._response = response
173
- self._start = start
174
-
175
- async def __aiter__(self) -> typing.AsyncIterator[bytes]:
176
- async for chunk in self._stream:
177
- yield chunk
178
-
179
- async def aclose(self) -> None:
180
- elapsed = time.perf_counter() - self._start
181
- self._response.elapsed = datetime.timedelta(seconds=elapsed)
182
- await self._stream.aclose()
183
-
184
-
185
- EventHook = typing.Callable[..., typing.Any]
186
-
187
-
188
- class BaseClient:
189
- def __init__(
190
- self,
191
- *,
192
- auth: AuthTypes | None = None,
193
- params: QueryParamTypes | None = None,
194
- headers: HeaderTypes | None = None,
195
- cookies: CookieTypes | None = None,
196
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
197
- follow_redirects: bool = False,
198
- max_redirects: int = DEFAULT_MAX_REDIRECTS,
199
- event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
200
- base_url: URL | str = "",
201
- trust_env: bool = True,
202
- default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
203
- ) -> None:
204
- event_hooks = {} if event_hooks is None else event_hooks
205
-
206
- self._base_url = self._enforce_trailing_slash(URL(base_url))
207
-
208
- self._auth = self._build_auth(auth)
209
- self._params = QueryParams(params)
210
- self.headers = Headers(headers)
211
- self._cookies = Cookies(cookies)
212
- self._timeout = Timeout(timeout)
213
- self.follow_redirects = follow_redirects
214
- self.max_redirects = max_redirects
215
- self._event_hooks = {
216
- "request": list(event_hooks.get("request", [])),
217
- "response": list(event_hooks.get("response", [])),
218
- }
219
- self._trust_env = trust_env
220
- self._default_encoding = default_encoding
221
- self._state = ClientState.UNOPENED
222
-
223
- @property
224
- def is_closed(self) -> bool:
225
- """
226
- Check if the client being closed
227
- """
228
- return self._state == ClientState.CLOSED
229
-
230
- @property
231
- def trust_env(self) -> bool:
232
- return self._trust_env
233
-
234
- def _enforce_trailing_slash(self, url: URL) -> URL:
235
- if url.raw_path.endswith(b"/"):
236
- return url
237
- return url.copy_with(raw_path=url.raw_path + b"/")
238
-
239
- def _get_proxy_map(
240
- self, proxy: ProxyTypes | None, allow_env_proxies: bool
241
- ) -> dict[str, Proxy | None]:
242
- if proxy is None:
243
- if allow_env_proxies:
244
- return {
245
- key: None if url is None else Proxy(url=url)
246
- for key, url in get_environment_proxies().items()
247
- }
248
- return {}
249
- else:
250
- proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy
251
- return {"all://": proxy}
252
-
253
- @property
254
- def timeout(self) -> Timeout:
255
- return self._timeout
256
-
257
- @timeout.setter
258
- def timeout(self, timeout: TimeoutTypes) -> None:
259
- self._timeout = Timeout(timeout)
260
-
261
- @property
262
- def event_hooks(self) -> dict[str, list[EventHook]]:
263
- return self._event_hooks
264
-
265
- @event_hooks.setter
266
- def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None:
267
- self._event_hooks = {
268
- "request": list(event_hooks.get("request", [])),
269
- "response": list(event_hooks.get("response", [])),
270
- }
271
-
272
- @property
273
- def auth(self) -> Auth | None:
274
- """
275
- Authentication class used when none is passed at the request-level.
276
-
277
- See also [Authentication][0].
278
-
279
- [0]: /quickstart/#authentication
280
- """
281
- return self._auth
282
-
283
- @auth.setter
284
- def auth(self, auth: AuthTypes) -> None:
285
- self._auth = self._build_auth(auth)
286
-
287
- @property
288
- def base_url(self) -> URL:
289
- """
290
- Base URL to use when sending requests with relative URLs.
291
- """
292
- return self._base_url
293
-
294
- @base_url.setter
295
- def base_url(self, url: URL | str) -> None:
296
- self._base_url = self._enforce_trailing_slash(URL(url))
297
-
298
- @property
299
- def headers(self) -> Headers:
300
- """
301
- HTTP headers to include when sending requests.
302
- """
303
- return self._headers
304
-
305
- @headers.setter
306
- def headers(self, headers: HeaderTypes) -> None:
307
- client_headers = Headers(
308
- {
309
- b"Accept": b"*/*",
310
- b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"),
311
- b"Connection": b"keep-alive",
312
- b"User-Agent": USER_AGENT.encode("ascii"),
313
- }
314
- )
315
- client_headers.update(headers)
316
- self._headers = client_headers
317
-
318
- @property
319
- def cookies(self) -> Cookies:
320
- """
321
- Cookie values to include when sending requests.
322
- """
323
- return self._cookies
324
-
325
- @cookies.setter
326
- def cookies(self, cookies: CookieTypes) -> None:
327
- self._cookies = Cookies(cookies)
328
-
329
- @property
330
- def params(self) -> QueryParams:
331
- """
332
- Query parameters to include in the URL when sending requests.
333
- """
334
- return self._params
335
-
336
- @params.setter
337
- def params(self, params: QueryParamTypes) -> None:
338
- self._params = QueryParams(params)
339
-
340
- def build_request(
341
- self,
342
- method: str,
343
- url: URL | str,
344
- *,
345
- content: RequestContent | None = None,
346
- data: RequestData | None = None,
347
- files: RequestFiles | None = None,
348
- json: typing.Any | None = None,
349
- params: QueryParamTypes | None = None,
350
- headers: HeaderTypes | None = None,
351
- cookies: CookieTypes | None = None,
352
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
353
- extensions: RequestExtensions | None = None,
354
- ) -> Request:
355
- """
356
- Build and return a request instance.
357
-
358
- * The `params`, `headers` and `cookies` arguments
359
- are merged with any values set on the client.
360
- * The `url` argument is merged with any `base_url` set on the client.
361
-
362
- See also: [Request instances][0]
363
-
364
- [0]: /advanced/clients/#request-instances
365
- """
366
- url = self._merge_url(url)
367
- headers = self._merge_headers(headers)
368
- cookies = self._merge_cookies(cookies)
369
- params = self._merge_queryparams(params)
370
- extensions = {} if extensions is None else extensions
371
- if "timeout" not in extensions:
372
- timeout = (
373
- self.timeout
374
- if isinstance(timeout, UseClientDefault)
375
- else Timeout(timeout)
376
- )
377
- extensions = dict(**extensions, timeout=timeout.as_dict())
378
- return Request(
379
- method,
380
- url,
381
- content=content,
382
- data=data,
383
- files=files,
384
- json=json,
385
- params=params,
386
- headers=headers,
387
- cookies=cookies,
388
- extensions=extensions,
389
- )
390
-
391
- def _merge_url(self, url: URL | str) -> URL:
392
- """
393
- Merge a URL argument together with any 'base_url' on the client,
394
- to create the URL used for the outgoing request.
395
- """
396
- merge_url = URL(url)
397
- if merge_url.is_relative_url:
398
- # To merge URLs we always append to the base URL. To get this
399
- # behaviour correct we always ensure the base URL ends in a '/'
400
- # separator, and strip any leading '/' from the merge URL.
401
- #
402
- # So, eg...
403
- #
404
- # >>> client = Client(base_url="https://www.example.com/subpath")
405
- # >>> client.base_url
406
- # URL('https://www.example.com/subpath/')
407
- # >>> client.build_request("GET", "/path").url
408
- # URL('https://www.example.com/subpath/path')
409
- merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/")
410
- return self.base_url.copy_with(raw_path=merge_raw_path)
411
- return merge_url
412
-
413
- def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None:
414
- """
415
- Merge a cookies argument together with any cookies on the client,
416
- to create the cookies used for the outgoing request.
417
- """
418
- if cookies or self.cookies:
419
- merged_cookies = Cookies(self.cookies)
420
- merged_cookies.update(cookies)
421
- return merged_cookies
422
- return cookies
423
-
424
- def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None:
425
- """
426
- Merge a headers argument together with any headers on the client,
427
- to create the headers used for the outgoing request.
428
- """
429
- merged_headers = Headers(self.headers)
430
- merged_headers.update(headers)
431
- return merged_headers
432
-
433
- def _merge_queryparams(
434
- self, params: QueryParamTypes | None = None
435
- ) -> QueryParamTypes | None:
436
- """
437
- Merge a queryparams argument together with any queryparams on the client,
438
- to create the queryparams used for the outgoing request.
439
- """
440
- if params or self.params:
441
- merged_queryparams = QueryParams(self.params)
442
- return merged_queryparams.merge(params)
443
- return params
444
-
445
- def _build_auth(self, auth: AuthTypes | None) -> Auth | None:
446
- if auth is None:
447
- return None
448
- elif isinstance(auth, tuple):
449
- return BasicAuth(username=auth[0], password=auth[1])
450
- elif isinstance(auth, Auth):
451
- return auth
452
- elif callable(auth):
453
- return FunctionAuth(func=auth)
454
- else:
455
- raise TypeError(f'Invalid "auth" argument: {auth!r}')
456
-
457
- def _build_request_auth(
458
- self,
459
- request: Request,
460
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
461
- ) -> Auth:
462
- auth = (
463
- self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth)
464
- )
465
-
466
- if auth is not None:
467
- return auth
468
-
469
- username, password = request.url.username, request.url.password
470
- if username or password:
471
- return BasicAuth(username=username, password=password)
472
-
473
- return Auth()
474
-
475
- def _build_redirect_request(self, request: Request, response: Response) -> Request:
476
- """
477
- Given a request and a redirect response, return a new request that
478
- should be used to effect the redirect.
479
- """
480
- method = self._redirect_method(request, response)
481
- url = self._redirect_url(request, response)
482
- headers = self._redirect_headers(request, url, method)
483
- stream = self._redirect_stream(request, method)
484
- cookies = Cookies(self.cookies)
485
- return Request(
486
- method=method,
487
- url=url,
488
- headers=headers,
489
- cookies=cookies,
490
- stream=stream,
491
- extensions=request.extensions,
492
- )
493
-
494
- def _redirect_method(self, request: Request, response: Response) -> str:
495
- """
496
- When being redirected we may want to change the method of the request
497
- based on certain specs or browser behavior.
498
- """
499
- method = request.method
500
-
501
- # https://tools.ietf.org/html/rfc7231#section-6.4.4
502
- if response.status_code == codes.SEE_OTHER and method != "HEAD":
503
- method = "GET"
504
-
505
- # Do what the browsers do, despite standards...
506
- # Turn 302s into GETs.
507
- if response.status_code == codes.FOUND and method != "HEAD":
508
- method = "GET"
509
-
510
- # If a POST is responded to with a 301, turn it into a GET.
511
- # This bizarre behaviour is explained in 'requests' issue 1704.
512
- if response.status_code == codes.MOVED_PERMANENTLY and method == "POST":
513
- method = "GET"
514
-
515
- return method
516
-
517
- def _redirect_url(self, request: Request, response: Response) -> URL:
518
- """
519
- Return the URL for the redirect to follow.
520
- """
521
- location = response.headers["Location"]
522
-
523
- try:
524
- url = URL(location)
525
- except InvalidURL as exc:
526
- raise RemoteProtocolError(
527
- f"Invalid URL in location header: {exc}.", request=request
528
- ) from None
529
-
530
- # Handle malformed 'Location' headers that are "absolute" form, have no host.
531
- # See: https://github.com/encode/httpx/issues/771
532
- if url.scheme and not url.host:
533
- url = url.copy_with(host=request.url.host)
534
-
535
- # Facilitate relative 'Location' headers, as allowed by RFC 7231.
536
- # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
537
- if url.is_relative_url:
538
- url = request.url.join(url)
539
-
540
- # Attach previous fragment if needed (RFC 7231 7.1.2)
541
- if request.url.fragment and not url.fragment:
542
- url = url.copy_with(fragment=request.url.fragment)
543
-
544
- return url
545
-
546
- def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
547
- """
548
- Return the headers that should be used for the redirect request.
549
- """
550
- headers = Headers(request.headers)
551
-
552
- if not _same_origin(url, request.url):
553
- if not _is_https_redirect(request.url, url):
554
- # Strip Authorization headers when responses are redirected
555
- # away from the origin. (Except for direct HTTP to HTTPS redirects.)
556
- headers.pop("Authorization", None)
557
-
558
- # Update the Host header.
559
- headers["Host"] = url.netloc.decode("ascii")
560
-
561
- if method != request.method and method == "GET":
562
- # If we've switched to a 'GET' request, then strip any headers which
563
- # are only relevant to the request body.
564
- headers.pop("Content-Length", None)
565
- headers.pop("Transfer-Encoding", None)
566
-
567
- # We should use the client cookie store to determine any cookie header,
568
- # rather than whatever was on the original outgoing request.
569
- headers.pop("Cookie", None)
570
-
571
- return headers
572
-
573
- def _redirect_stream(
574
- self, request: Request, method: str
575
- ) -> SyncByteStream | AsyncByteStream | None:
576
- """
577
- Return the body that should be used for the redirect request.
578
- """
579
- if method != request.method and method == "GET":
580
- return None
581
-
582
- return request.stream
583
-
584
- def _set_timeout(self, request: Request) -> None:
585
- if "timeout" not in request.extensions:
586
- timeout = (
587
- self.timeout
588
- if isinstance(self.timeout, UseClientDefault)
589
- else Timeout(self.timeout)
590
- )
591
- request.extensions = dict(**request.extensions, timeout=timeout.as_dict())
592
-
593
-
594
- class Client(BaseClient):
595
- """
596
- An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.
597
-
598
- It can be shared between threads.
599
-
600
- Usage:
601
-
602
- ```python
603
- >>> client = httpx.Client()
604
- >>> response = client.get('https://example.org')
605
- ```
606
-
607
- **Parameters:**
608
-
609
- * **auth** - *(optional)* An authentication class to use when sending
610
- requests.
611
- * **params** - *(optional)* Query parameters to include in request URLs, as
612
- a string, dictionary, or sequence of two-tuples.
613
- * **headers** - *(optional)* Dictionary of HTTP headers to include when
614
- sending requests.
615
- * **cookies** - *(optional)* Dictionary of Cookie items to include when
616
- sending requests.
617
- * **verify** - *(optional)* Either `True` to use an SSL context with the
618
- default CA bundle, `False` to disable verification, or an instance of
619
- `ssl.SSLContext` to use a custom context.
620
- * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
621
- enabled. Defaults to `False`.
622
- * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
623
- * **timeout** - *(optional)* The timeout configuration to use when sending
624
- requests.
625
- * **limits** - *(optional)* The limits configuration to use.
626
- * **max_redirects** - *(optional)* The maximum number of redirect responses
627
- that should be followed.
628
- * **base_url** - *(optional)* A URL to use as the base when building
629
- request URLs.
630
- * **transport** - *(optional)* A transport class to use for sending requests
631
- over the network.
632
- * **trust_env** - *(optional)* Enables or disables usage of environment
633
- variables for configuration.
634
- * **default_encoding** - *(optional)* The default encoding to use for decoding
635
- response text, if no charset information is included in a response Content-Type
636
- header. Set to a callable for automatic character set detection. Default: "utf-8".
637
- """
638
-
639
- def __init__(
640
- self,
641
- *,
642
- auth: AuthTypes | None = None,
643
- params: QueryParamTypes | None = None,
644
- headers: HeaderTypes | None = None,
645
- cookies: CookieTypes | None = None,
646
- verify: ssl.SSLContext | str | bool = True,
647
- cert: CertTypes | None = None,
648
- trust_env: bool = True,
649
- http1: bool = True,
650
- http2: bool = False,
651
- proxy: ProxyTypes | None = None,
652
- mounts: None | (typing.Mapping[str, BaseTransport | None]) = None,
653
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
654
- follow_redirects: bool = False,
655
- limits: Limits = DEFAULT_LIMITS,
656
- max_redirects: int = DEFAULT_MAX_REDIRECTS,
657
- event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
658
- base_url: URL | str = "",
659
- transport: BaseTransport | None = None,
660
- default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
661
- ) -> None:
662
- super().__init__(
663
- auth=auth,
664
- params=params,
665
- headers=headers,
666
- cookies=cookies,
667
- timeout=timeout,
668
- follow_redirects=follow_redirects,
669
- max_redirects=max_redirects,
670
- event_hooks=event_hooks,
671
- base_url=base_url,
672
- trust_env=trust_env,
673
- default_encoding=default_encoding,
674
- )
675
-
676
- if http2:
677
- try:
678
- import h2 # noqa
679
- except ImportError: # pragma: no cover
680
- raise ImportError(
681
- "Using http2=True, but the 'h2' package is not installed. "
682
- "Make sure to install httpx using `pip install httpx[http2]`."
683
- ) from None
684
-
685
- allow_env_proxies = trust_env and transport is None
686
- proxy_map = self._get_proxy_map(proxy, allow_env_proxies)
687
-
688
- self._transport = self._init_transport(
689
- verify=verify,
690
- cert=cert,
691
- trust_env=trust_env,
692
- http1=http1,
693
- http2=http2,
694
- limits=limits,
695
- transport=transport,
696
- )
697
- self._mounts: dict[URLPattern, BaseTransport | None] = {
698
- URLPattern(key): None
699
- if proxy is None
700
- else self._init_proxy_transport(
701
- proxy,
702
- verify=verify,
703
- cert=cert,
704
- trust_env=trust_env,
705
- http1=http1,
706
- http2=http2,
707
- limits=limits,
708
- )
709
- for key, proxy in proxy_map.items()
710
- }
711
- if mounts is not None:
712
- self._mounts.update(
713
- {URLPattern(key): transport for key, transport in mounts.items()}
714
- )
715
-
716
- self._mounts = dict(sorted(self._mounts.items()))
717
-
718
- def _init_transport(
719
- self,
720
- verify: ssl.SSLContext | str | bool = True,
721
- cert: CertTypes | None = None,
722
- trust_env: bool = True,
723
- http1: bool = True,
724
- http2: bool = False,
725
- limits: Limits = DEFAULT_LIMITS,
726
- transport: BaseTransport | None = None,
727
- ) -> BaseTransport:
728
- if transport is not None:
729
- return transport
730
-
731
- return HTTPTransport(
732
- verify=verify,
733
- cert=cert,
734
- trust_env=trust_env,
735
- http1=http1,
736
- http2=http2,
737
- limits=limits,
738
- )
739
-
740
- def _init_proxy_transport(
741
- self,
742
- proxy: Proxy,
743
- verify: ssl.SSLContext | str | bool = True,
744
- cert: CertTypes | None = None,
745
- trust_env: bool = True,
746
- http1: bool = True,
747
- http2: bool = False,
748
- limits: Limits = DEFAULT_LIMITS,
749
- ) -> BaseTransport:
750
- return HTTPTransport(
751
- verify=verify,
752
- cert=cert,
753
- trust_env=trust_env,
754
- http1=http1,
755
- http2=http2,
756
- limits=limits,
757
- proxy=proxy,
758
- )
759
-
760
- def _transport_for_url(self, url: URL) -> BaseTransport:
761
- """
762
- Returns the transport instance that should be used for a given URL.
763
- This will either be the standard connection pool, or a proxy.
764
- """
765
- for pattern, transport in self._mounts.items():
766
- if pattern.matches(url):
767
- return self._transport if transport is None else transport
768
-
769
- return self._transport
770
-
771
- def request(
772
- self,
773
- method: str,
774
- url: URL | str,
775
- *,
776
- content: RequestContent | None = None,
777
- data: RequestData | None = None,
778
- files: RequestFiles | None = None,
779
- json: typing.Any | None = None,
780
- params: QueryParamTypes | None = None,
781
- headers: HeaderTypes | None = None,
782
- cookies: CookieTypes | None = None,
783
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
784
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
785
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
786
- extensions: RequestExtensions | None = None,
787
- ) -> Response:
788
- """
789
- Build and send a request.
790
-
791
- Equivalent to:
792
-
793
- ```python
794
- request = client.build_request(...)
795
- response = client.send(request, ...)
796
- ```
797
-
798
- See `Client.build_request()`, `Client.send()` and
799
- [Merging of configuration][0] for how the various parameters
800
- are merged with client-level configuration.
801
-
802
- [0]: /advanced/clients/#merging-of-configuration
803
- """
804
- if cookies is not None:
805
- message = (
806
- "Setting per-request cookies=<...> is being deprecated, because "
807
- "the expected behaviour on cookie persistence is ambiguous. Set "
808
- "cookies directly on the client instance instead."
809
- )
810
- warnings.warn(message, DeprecationWarning, stacklevel=2)
811
-
812
- request = self.build_request(
813
- method=method,
814
- url=url,
815
- content=content,
816
- data=data,
817
- files=files,
818
- json=json,
819
- params=params,
820
- headers=headers,
821
- cookies=cookies,
822
- timeout=timeout,
823
- extensions=extensions,
824
- )
825
- return self.send(request, auth=auth, follow_redirects=follow_redirects)
826
-
827
- @contextmanager
828
- def stream(
829
- self,
830
- method: str,
831
- url: URL | str,
832
- *,
833
- content: RequestContent | None = None,
834
- data: RequestData | None = None,
835
- files: RequestFiles | None = None,
836
- json: typing.Any | None = None,
837
- params: QueryParamTypes | None = None,
838
- headers: HeaderTypes | None = None,
839
- cookies: CookieTypes | None = None,
840
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
841
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
842
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
843
- extensions: RequestExtensions | None = None,
844
- ) -> typing.Iterator[Response]:
845
- """
846
- Alternative to `httpx.request()` that streams the response body
847
- instead of loading it into memory at once.
848
-
849
- **Parameters**: See `httpx.request`.
850
-
851
- See also: [Streaming Responses][0]
852
-
853
- [0]: /quickstart#streaming-responses
854
- """
855
- request = self.build_request(
856
- method=method,
857
- url=url,
858
- content=content,
859
- data=data,
860
- files=files,
861
- json=json,
862
- params=params,
863
- headers=headers,
864
- cookies=cookies,
865
- timeout=timeout,
866
- extensions=extensions,
867
- )
868
- response = self.send(
869
- request=request,
870
- auth=auth,
871
- follow_redirects=follow_redirects,
872
- stream=True,
873
- )
874
- try:
875
- yield response
876
- finally:
877
- response.close()
878
-
879
- def send(
880
- self,
881
- request: Request,
882
- *,
883
- stream: bool = False,
884
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
885
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
886
- ) -> Response:
887
- """
888
- Send a request.
889
-
890
- The request is sent as-is, unmodified.
891
-
892
- Typically you'll want to build one with `Client.build_request()`
893
- so that any client-level configuration is merged into the request,
894
- but passing an explicit `httpx.Request()` is supported as well.
895
-
896
- See also: [Request instances][0]
897
-
898
- [0]: /advanced/clients/#request-instances
899
- """
900
- if self._state == ClientState.CLOSED:
901
- raise RuntimeError("Cannot send a request, as the client has been closed.")
902
-
903
- self._state = ClientState.OPENED
904
- follow_redirects = (
905
- self.follow_redirects
906
- if isinstance(follow_redirects, UseClientDefault)
907
- else follow_redirects
908
- )
909
-
910
- self._set_timeout(request)
911
-
912
- auth = self._build_request_auth(request, auth)
913
-
914
- response = self._send_handling_auth(
915
- request,
916
- auth=auth,
917
- follow_redirects=follow_redirects,
918
- history=[],
919
- )
920
- try:
921
- if not stream:
922
- response.read()
923
-
924
- return response
925
-
926
- except BaseException as exc:
927
- response.close()
928
- raise exc
929
-
930
- def _send_handling_auth(
931
- self,
932
- request: Request,
933
- auth: Auth,
934
- follow_redirects: bool,
935
- history: list[Response],
936
- ) -> Response:
937
- auth_flow = auth.sync_auth_flow(request)
938
- try:
939
- request = next(auth_flow)
940
-
941
- while True:
942
- response = self._send_handling_redirects(
943
- request,
944
- follow_redirects=follow_redirects,
945
- history=history,
946
- )
947
- try:
948
- try:
949
- next_request = auth_flow.send(response)
950
- except StopIteration:
951
- return response
952
-
953
- response.history = list(history)
954
- response.read()
955
- request = next_request
956
- history.append(response)
957
-
958
- except BaseException as exc:
959
- response.close()
960
- raise exc
961
- finally:
962
- auth_flow.close()
963
-
964
- def _send_handling_redirects(
965
- self,
966
- request: Request,
967
- follow_redirects: bool,
968
- history: list[Response],
969
- ) -> Response:
970
- while True:
971
- if len(history) > self.max_redirects:
972
- raise TooManyRedirects(
973
- "Exceeded maximum allowed redirects.", request=request
974
- )
975
-
976
- for hook in self._event_hooks["request"]:
977
- hook(request)
978
-
979
- response = self._send_single_request(request)
980
- try:
981
- for hook in self._event_hooks["response"]:
982
- hook(response)
983
- response.history = list(history)
984
-
985
- if not response.has_redirect_location:
986
- return response
987
-
988
- request = self._build_redirect_request(request, response)
989
- history = history + [response]
990
-
991
- if follow_redirects:
992
- response.read()
993
- else:
994
- response.next_request = request
995
- return response
996
-
997
- except BaseException as exc:
998
- response.close()
999
- raise exc
1000
-
1001
- def _send_single_request(self, request: Request) -> Response:
1002
- """
1003
- Sends a single request, without handling any redirections.
1004
- """
1005
- transport = self._transport_for_url(request.url)
1006
- start = time.perf_counter()
1007
-
1008
- if not isinstance(request.stream, SyncByteStream):
1009
- raise RuntimeError(
1010
- "Attempted to send an async request with a sync Client instance."
1011
- )
1012
-
1013
- with request_context(request=request):
1014
- response = transport.handle_request(request)
1015
-
1016
- assert isinstance(response.stream, SyncByteStream)
1017
-
1018
- response.request = request
1019
- response.stream = BoundSyncStream(
1020
- response.stream, response=response, start=start
1021
- )
1022
- self.cookies.extract_cookies(response)
1023
- response.default_encoding = self._default_encoding
1024
-
1025
- logger.info(
1026
- 'HTTP Request: %s %s "%s %d %s"',
1027
- request.method,
1028
- request.url,
1029
- response.http_version,
1030
- response.status_code,
1031
- response.reason_phrase,
1032
- )
1033
-
1034
- return response
1035
-
1036
- def get(
1037
- self,
1038
- url: URL | str,
1039
- *,
1040
- params: QueryParamTypes | None = None,
1041
- headers: HeaderTypes | None = None,
1042
- cookies: CookieTypes | None = None,
1043
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1044
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1045
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1046
- extensions: RequestExtensions | None = None,
1047
- ) -> Response:
1048
- """
1049
- Send a `GET` request.
1050
-
1051
- **Parameters**: See `httpx.request`.
1052
- """
1053
- return self.request(
1054
- "GET",
1055
- url,
1056
- params=params,
1057
- headers=headers,
1058
- cookies=cookies,
1059
- auth=auth,
1060
- follow_redirects=follow_redirects,
1061
- timeout=timeout,
1062
- extensions=extensions,
1063
- )
1064
-
1065
- def options(
1066
- self,
1067
- url: URL | str,
1068
- *,
1069
- params: QueryParamTypes | None = None,
1070
- headers: HeaderTypes | None = None,
1071
- cookies: CookieTypes | None = None,
1072
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1073
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1074
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1075
- extensions: RequestExtensions | None = None,
1076
- ) -> Response:
1077
- """
1078
- Send an `OPTIONS` request.
1079
-
1080
- **Parameters**: See `httpx.request`.
1081
- """
1082
- return self.request(
1083
- "OPTIONS",
1084
- url,
1085
- params=params,
1086
- headers=headers,
1087
- cookies=cookies,
1088
- auth=auth,
1089
- follow_redirects=follow_redirects,
1090
- timeout=timeout,
1091
- extensions=extensions,
1092
- )
1093
-
1094
- def head(
1095
- self,
1096
- url: URL | str,
1097
- *,
1098
- params: QueryParamTypes | None = None,
1099
- headers: HeaderTypes | None = None,
1100
- cookies: CookieTypes | None = None,
1101
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1102
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1103
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1104
- extensions: RequestExtensions | None = None,
1105
- ) -> Response:
1106
- """
1107
- Send a `HEAD` request.
1108
-
1109
- **Parameters**: See `httpx.request`.
1110
- """
1111
- return self.request(
1112
- "HEAD",
1113
- url,
1114
- params=params,
1115
- headers=headers,
1116
- cookies=cookies,
1117
- auth=auth,
1118
- follow_redirects=follow_redirects,
1119
- timeout=timeout,
1120
- extensions=extensions,
1121
- )
1122
-
1123
- def post(
1124
- self,
1125
- url: URL | str,
1126
- *,
1127
- content: RequestContent | None = None,
1128
- data: RequestData | None = None,
1129
- files: RequestFiles | None = None,
1130
- json: typing.Any | None = None,
1131
- params: QueryParamTypes | None = None,
1132
- headers: HeaderTypes | None = None,
1133
- cookies: CookieTypes | None = None,
1134
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1135
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1136
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1137
- extensions: RequestExtensions | None = None,
1138
- ) -> Response:
1139
- """
1140
- Send a `POST` request.
1141
-
1142
- **Parameters**: See `httpx.request`.
1143
- """
1144
- return self.request(
1145
- "POST",
1146
- url,
1147
- content=content,
1148
- data=data,
1149
- files=files,
1150
- json=json,
1151
- params=params,
1152
- headers=headers,
1153
- cookies=cookies,
1154
- auth=auth,
1155
- follow_redirects=follow_redirects,
1156
- timeout=timeout,
1157
- extensions=extensions,
1158
- )
1159
-
1160
- def put(
1161
- self,
1162
- url: URL | str,
1163
- *,
1164
- content: RequestContent | None = None,
1165
- data: RequestData | None = None,
1166
- files: RequestFiles | None = None,
1167
- json: typing.Any | None = None,
1168
- params: QueryParamTypes | None = None,
1169
- headers: HeaderTypes | None = None,
1170
- cookies: CookieTypes | None = None,
1171
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1172
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1173
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1174
- extensions: RequestExtensions | None = None,
1175
- ) -> Response:
1176
- """
1177
- Send a `PUT` request.
1178
-
1179
- **Parameters**: See `httpx.request`.
1180
- """
1181
- return self.request(
1182
- "PUT",
1183
- url,
1184
- content=content,
1185
- data=data,
1186
- files=files,
1187
- json=json,
1188
- params=params,
1189
- headers=headers,
1190
- cookies=cookies,
1191
- auth=auth,
1192
- follow_redirects=follow_redirects,
1193
- timeout=timeout,
1194
- extensions=extensions,
1195
- )
1196
-
1197
- def patch(
1198
- self,
1199
- url: URL | str,
1200
- *,
1201
- content: RequestContent | None = None,
1202
- data: RequestData | None = None,
1203
- files: RequestFiles | None = None,
1204
- json: typing.Any | None = None,
1205
- params: QueryParamTypes | None = None,
1206
- headers: HeaderTypes | None = None,
1207
- cookies: CookieTypes | None = None,
1208
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1209
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1210
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1211
- extensions: RequestExtensions | None = None,
1212
- ) -> Response:
1213
- """
1214
- Send a `PATCH` request.
1215
-
1216
- **Parameters**: See `httpx.request`.
1217
- """
1218
- return self.request(
1219
- "PATCH",
1220
- url,
1221
- content=content,
1222
- data=data,
1223
- files=files,
1224
- json=json,
1225
- params=params,
1226
- headers=headers,
1227
- cookies=cookies,
1228
- auth=auth,
1229
- follow_redirects=follow_redirects,
1230
- timeout=timeout,
1231
- extensions=extensions,
1232
- )
1233
-
1234
- def delete(
1235
- self,
1236
- url: URL | str,
1237
- *,
1238
- params: QueryParamTypes | None = None,
1239
- headers: HeaderTypes | None = None,
1240
- cookies: CookieTypes | None = None,
1241
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1242
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1243
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1244
- extensions: RequestExtensions | None = None,
1245
- ) -> Response:
1246
- """
1247
- Send a `DELETE` request.
1248
-
1249
- **Parameters**: See `httpx.request`.
1250
- """
1251
- return self.request(
1252
- "DELETE",
1253
- url,
1254
- params=params,
1255
- headers=headers,
1256
- cookies=cookies,
1257
- auth=auth,
1258
- follow_redirects=follow_redirects,
1259
- timeout=timeout,
1260
- extensions=extensions,
1261
- )
1262
-
1263
- def close(self) -> None:
1264
- """
1265
- Close transport and proxies.
1266
- """
1267
- if self._state != ClientState.CLOSED:
1268
- self._state = ClientState.CLOSED
1269
-
1270
- self._transport.close()
1271
- for transport in self._mounts.values():
1272
- if transport is not None:
1273
- transport.close()
1274
-
1275
- def __enter__(self: T) -> T:
1276
- if self._state != ClientState.UNOPENED:
1277
- msg = {
1278
- ClientState.OPENED: "Cannot open a client instance more than once.",
1279
- ClientState.CLOSED: (
1280
- "Cannot reopen a client instance, once it has been closed."
1281
- ),
1282
- }[self._state]
1283
- raise RuntimeError(msg)
1284
-
1285
- self._state = ClientState.OPENED
1286
-
1287
- self._transport.__enter__()
1288
- for transport in self._mounts.values():
1289
- if transport is not None:
1290
- transport.__enter__()
1291
- return self
1292
-
1293
- def __exit__(
1294
- self,
1295
- exc_type: type[BaseException] | None = None,
1296
- exc_value: BaseException | None = None,
1297
- traceback: TracebackType | None = None,
1298
- ) -> None:
1299
- self._state = ClientState.CLOSED
1300
-
1301
- self._transport.__exit__(exc_type, exc_value, traceback)
1302
- for transport in self._mounts.values():
1303
- if transport is not None:
1304
- transport.__exit__(exc_type, exc_value, traceback)
1305
-
1306
-
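The `Client` defined above exposes the familiar httpx surface: `build_request()`/`send()`, per-method helpers, streaming, and context-manager lifecycle. A minimal usage sketch of that API follows; the base URL, request path, and timeout value are illustrative placeholders, not taken from this repository.

```python
import httpx

# Reuse one client so the connection pool is shared across requests.
with httpx.Client(base_url="https://example.org", timeout=10.0) as client:
    response = client.get("/", params={"q": "demo"})
    response.raise_for_status()
    print(response.status_code, len(response.text))

    # Stream a large body instead of loading it into memory at once.
    with client.stream("GET", "/large-file") as stream_response:
        for chunk in stream_response.iter_bytes():
            pass  # process each chunk incrementally
```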
1307
- class AsyncClient(BaseClient):
1308
- """
1309
- An asynchronous HTTP client, with connection pooling, HTTP/2, redirects,
1310
- cookie persistence, etc.
1311
-
1312
- It can be shared between tasks.
1313
-
1314
- Usage:
1315
-
1316
- ```python
1317
- >>> async with httpx.AsyncClient() as client:
1318
- >>> response = await client.get('https://example.org')
1319
- ```
1320
-
1321
- **Parameters:**
1322
-
1323
- * **auth** - *(optional)* An authentication class to use when sending
1324
- requests.
1325
- * **params** - *(optional)* Query parameters to include in request URLs, as
1326
- a string, dictionary, or sequence of two-tuples.
1327
- * **headers** - *(optional)* Dictionary of HTTP headers to include when
1328
- sending requests.
1329
- * **cookies** - *(optional)* Dictionary of Cookie items to include when
1330
- sending requests.
1331
- * **verify** - *(optional)* Either `True` to use an SSL context with the
1332
- default CA bundle, `False` to disable verification, or an instance of
1333
- `ssl.SSLContext` to use a custom context.
1334
- * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
1335
- enabled. Defaults to `False`.
1336
- * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
1337
- * **timeout** - *(optional)* The timeout configuration to use when sending
1338
- requests.
1339
- * **limits** - *(optional)* The limits configuration to use.
1340
- * **max_redirects** - *(optional)* The maximum number of redirect responses
1341
- that should be followed.
1342
- * **base_url** - *(optional)* A URL to use as the base when building
1343
- request URLs.
1344
- * **transport** - *(optional)* A transport class to use for sending requests
1345
- over the network.
1346
- * **trust_env** - *(optional)* Enables or disables usage of environment
1347
- variables for configuration.
1348
- * **default_encoding** - *(optional)* The default encoding to use for decoding
1349
- response text, if no charset information is included in a response Content-Type
1350
- header. Set to a callable for automatic character set detection. Default: "utf-8".
1351
- """
1352
-
1353
- def __init__(
1354
- self,
1355
- *,
1356
- auth: AuthTypes | None = None,
1357
- params: QueryParamTypes | None = None,
1358
- headers: HeaderTypes | None = None,
1359
- cookies: CookieTypes | None = None,
1360
- verify: ssl.SSLContext | str | bool = True,
1361
- cert: CertTypes | None = None,
1362
- http1: bool = True,
1363
- http2: bool = False,
1364
- proxy: ProxyTypes | None = None,
1365
- mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None,
1366
- timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
1367
- follow_redirects: bool = False,
1368
- limits: Limits = DEFAULT_LIMITS,
1369
- max_redirects: int = DEFAULT_MAX_REDIRECTS,
1370
- event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
1371
- base_url: URL | str = "",
1372
- transport: AsyncBaseTransport | None = None,
1373
- trust_env: bool = True,
1374
- default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
1375
- ) -> None:
1376
- super().__init__(
1377
- auth=auth,
1378
- params=params,
1379
- headers=headers,
1380
- cookies=cookies,
1381
- timeout=timeout,
1382
- follow_redirects=follow_redirects,
1383
- max_redirects=max_redirects,
1384
- event_hooks=event_hooks,
1385
- base_url=base_url,
1386
- trust_env=trust_env,
1387
- default_encoding=default_encoding,
1388
- )
1389
-
1390
- if http2:
1391
- try:
1392
- import h2 # noqa
1393
- except ImportError: # pragma: no cover
1394
- raise ImportError(
1395
- "Using http2=True, but the 'h2' package is not installed. "
1396
- "Make sure to install httpx using `pip install httpx[http2]`."
1397
- ) from None
1398
-
1399
- allow_env_proxies = trust_env and transport is None
1400
- proxy_map = self._get_proxy_map(proxy, allow_env_proxies)
1401
-
1402
- self._transport = self._init_transport(
1403
- verify=verify,
1404
- cert=cert,
1405
- trust_env=trust_env,
1406
- http1=http1,
1407
- http2=http2,
1408
- limits=limits,
1409
- transport=transport,
1410
- )
1411
-
1412
- self._mounts: dict[URLPattern, AsyncBaseTransport | None] = {
1413
- URLPattern(key): None
1414
- if proxy is None
1415
- else self._init_proxy_transport(
1416
- proxy,
1417
- verify=verify,
1418
- cert=cert,
1419
- trust_env=trust_env,
1420
- http1=http1,
1421
- http2=http2,
1422
- limits=limits,
1423
- )
1424
- for key, proxy in proxy_map.items()
1425
- }
1426
- if mounts is not None:
1427
- self._mounts.update(
1428
- {URLPattern(key): transport for key, transport in mounts.items()}
1429
- )
1430
- self._mounts = dict(sorted(self._mounts.items()))
1431
-
1432
- def _init_transport(
1433
- self,
1434
- verify: ssl.SSLContext | str | bool = True,
1435
- cert: CertTypes | None = None,
1436
- trust_env: bool = True,
1437
- http1: bool = True,
1438
- http2: bool = False,
1439
- limits: Limits = DEFAULT_LIMITS,
1440
- transport: AsyncBaseTransport | None = None,
1441
- ) -> AsyncBaseTransport:
1442
- if transport is not None:
1443
- return transport
1444
-
1445
- return AsyncHTTPTransport(
1446
- verify=verify,
1447
- cert=cert,
1448
- trust_env=trust_env,
1449
- http1=http1,
1450
- http2=http2,
1451
- limits=limits,
1452
- )
1453
-
1454
- def _init_proxy_transport(
1455
- self,
1456
- proxy: Proxy,
1457
- verify: ssl.SSLContext | str | bool = True,
1458
- cert: CertTypes | None = None,
1459
- trust_env: bool = True,
1460
- http1: bool = True,
1461
- http2: bool = False,
1462
- limits: Limits = DEFAULT_LIMITS,
1463
- ) -> AsyncBaseTransport:
1464
- return AsyncHTTPTransport(
1465
- verify=verify,
1466
- cert=cert,
1467
- trust_env=trust_env,
1468
- http1=http1,
1469
- http2=http2,
1470
- limits=limits,
1471
- proxy=proxy,
1472
- )
1473
-
1474
- def _transport_for_url(self, url: URL) -> AsyncBaseTransport:
1475
- """
1476
- Returns the transport instance that should be used for a given URL.
1477
- This will either be the standard connection pool, or a proxy.
1478
- """
1479
- for pattern, transport in self._mounts.items():
1480
- if pattern.matches(url):
1481
- return self._transport if transport is None else transport
1482
-
1483
- return self._transport
1484
-
1485
- async def request(
1486
- self,
1487
- method: str,
1488
- url: URL | str,
1489
- *,
1490
- content: RequestContent | None = None,
1491
- data: RequestData | None = None,
1492
- files: RequestFiles | None = None,
1493
- json: typing.Any | None = None,
1494
- params: QueryParamTypes | None = None,
1495
- headers: HeaderTypes | None = None,
1496
- cookies: CookieTypes | None = None,
1497
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1498
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1499
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1500
- extensions: RequestExtensions | None = None,
1501
- ) -> Response:
1502
- """
1503
- Build and send a request.
1504
-
1505
- Equivalent to:
1506
-
1507
- ```python
1508
- request = client.build_request(...)
1509
- response = await client.send(request, ...)
1510
- ```
1511
-
1512
- See `AsyncClient.build_request()`, `AsyncClient.send()`
1513
- and [Merging of configuration][0] for how the various parameters
1514
- are merged with client-level configuration.
1515
-
1516
- [0]: /advanced/clients/#merging-of-configuration
1517
- """
1518
-
1519
- if cookies is not None: # pragma: no cover
1520
- message = (
1521
- "Setting per-request cookies=<...> is being deprecated, because "
1522
- "the expected behaviour on cookie persistence is ambiguous. Set "
1523
- "cookies directly on the client instance instead."
1524
- )
1525
- warnings.warn(message, DeprecationWarning, stacklevel=2)
1526
-
1527
- request = self.build_request(
1528
- method=method,
1529
- url=url,
1530
- content=content,
1531
- data=data,
1532
- files=files,
1533
- json=json,
1534
- params=params,
1535
- headers=headers,
1536
- cookies=cookies,
1537
- timeout=timeout,
1538
- extensions=extensions,
1539
- )
1540
- return await self.send(request, auth=auth, follow_redirects=follow_redirects)
1541
-
1542
- @asynccontextmanager
1543
- async def stream(
1544
- self,
1545
- method: str,
1546
- url: URL | str,
1547
- *,
1548
- content: RequestContent | None = None,
1549
- data: RequestData | None = None,
1550
- files: RequestFiles | None = None,
1551
- json: typing.Any | None = None,
1552
- params: QueryParamTypes | None = None,
1553
- headers: HeaderTypes | None = None,
1554
- cookies: CookieTypes | None = None,
1555
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1556
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1557
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1558
- extensions: RequestExtensions | None = None,
1559
- ) -> typing.AsyncIterator[Response]:
1560
- """
1561
- Alternative to `httpx.request()` that streams the response body
1562
- instead of loading it into memory at once.
1563
-
1564
- **Parameters**: See `httpx.request`.
1565
-
1566
- See also: [Streaming Responses][0]
1567
-
1568
- [0]: /quickstart#streaming-responses
1569
- """
1570
- request = self.build_request(
1571
- method=method,
1572
- url=url,
1573
- content=content,
1574
- data=data,
1575
- files=files,
1576
- json=json,
1577
- params=params,
1578
- headers=headers,
1579
- cookies=cookies,
1580
- timeout=timeout,
1581
- extensions=extensions,
1582
- )
1583
- response = await self.send(
1584
- request=request,
1585
- auth=auth,
1586
- follow_redirects=follow_redirects,
1587
- stream=True,
1588
- )
1589
- try:
1590
- yield response
1591
- finally:
1592
- await response.aclose()
1593
-
1594
- async def send(
1595
- self,
1596
- request: Request,
1597
- *,
1598
- stream: bool = False,
1599
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1600
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1601
- ) -> Response:
1602
- """
1603
- Send a request.
1604
-
1605
- The request is sent as-is, unmodified.
1606
-
1607
- Typically you'll want to build one with `AsyncClient.build_request()`
1608
- so that any client-level configuration is merged into the request,
1609
- but passing an explicit `httpx.Request()` is supported as well.
1610
-
1611
- See also: [Request instances][0]
1612
-
1613
- [0]: /advanced/clients/#request-instances
1614
- """
1615
- if self._state == ClientState.CLOSED:
1616
- raise RuntimeError("Cannot send a request, as the client has been closed.")
1617
-
1618
- self._state = ClientState.OPENED
1619
- follow_redirects = (
1620
- self.follow_redirects
1621
- if isinstance(follow_redirects, UseClientDefault)
1622
- else follow_redirects
1623
- )
1624
-
1625
- self._set_timeout(request)
1626
-
1627
- auth = self._build_request_auth(request, auth)
1628
-
1629
- response = await self._send_handling_auth(
1630
- request,
1631
- auth=auth,
1632
- follow_redirects=follow_redirects,
1633
- history=[],
1634
- )
1635
- try:
1636
- if not stream:
1637
- await response.aread()
1638
-
1639
- return response
1640
-
1641
- except BaseException as exc:
1642
- await response.aclose()
1643
- raise exc
1644
-
1645
- async def _send_handling_auth(
1646
- self,
1647
- request: Request,
1648
- auth: Auth,
1649
- follow_redirects: bool,
1650
- history: list[Response],
1651
- ) -> Response:
1652
- auth_flow = auth.async_auth_flow(request)
1653
- try:
1654
- request = await auth_flow.__anext__()
1655
-
1656
- while True:
1657
- response = await self._send_handling_redirects(
1658
- request,
1659
- follow_redirects=follow_redirects,
1660
- history=history,
1661
- )
1662
- try:
1663
- try:
1664
- next_request = await auth_flow.asend(response)
1665
- except StopAsyncIteration:
1666
- return response
1667
-
1668
- response.history = list(history)
1669
- await response.aread()
1670
- request = next_request
1671
- history.append(response)
1672
-
1673
- except BaseException as exc:
1674
- await response.aclose()
1675
- raise exc
1676
- finally:
1677
- await auth_flow.aclose()
1678
-
1679
- async def _send_handling_redirects(
1680
- self,
1681
- request: Request,
1682
- follow_redirects: bool,
1683
- history: list[Response],
1684
- ) -> Response:
1685
- while True:
1686
- if len(history) > self.max_redirects:
1687
- raise TooManyRedirects(
1688
- "Exceeded maximum allowed redirects.", request=request
1689
- )
1690
-
1691
- for hook in self._event_hooks["request"]:
1692
- await hook(request)
1693
-
1694
- response = await self._send_single_request(request)
1695
- try:
1696
- for hook in self._event_hooks["response"]:
1697
- await hook(response)
1698
-
1699
- response.history = list(history)
1700
-
1701
- if not response.has_redirect_location:
1702
- return response
1703
-
1704
- request = self._build_redirect_request(request, response)
1705
- history = history + [response]
1706
-
1707
- if follow_redirects:
1708
- await response.aread()
1709
- else:
1710
- response.next_request = request
1711
- return response
1712
-
1713
- except BaseException as exc:
1714
- await response.aclose()
1715
- raise exc
1716
-
1717
- async def _send_single_request(self, request: Request) -> Response:
1718
- """
1719
- Sends a single request, without handling any redirections.
1720
- """
1721
- transport = self._transport_for_url(request.url)
1722
- start = time.perf_counter()
1723
-
1724
- if not isinstance(request.stream, AsyncByteStream):
1725
- raise RuntimeError(
1726
- "Attempted to send an sync request with an AsyncClient instance."
1727
- )
1728
-
1729
- with request_context(request=request):
1730
- response = await transport.handle_async_request(request)
1731
-
1732
- assert isinstance(response.stream, AsyncByteStream)
1733
- response.request = request
1734
- response.stream = BoundAsyncStream(
1735
- response.stream, response=response, start=start
1736
- )
1737
- self.cookies.extract_cookies(response)
1738
- response.default_encoding = self._default_encoding
1739
-
1740
- logger.info(
1741
- 'HTTP Request: %s %s "%s %d %s"',
1742
- request.method,
1743
- request.url,
1744
- response.http_version,
1745
- response.status_code,
1746
- response.reason_phrase,
1747
- )
1748
-
1749
- return response
1750
-
1751
- async def get(
1752
- self,
1753
- url: URL | str,
1754
- *,
1755
- params: QueryParamTypes | None = None,
1756
- headers: HeaderTypes | None = None,
1757
- cookies: CookieTypes | None = None,
1758
- auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1759
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1760
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1761
- extensions: RequestExtensions | None = None,
1762
- ) -> Response:
1763
- """
1764
- Send a `GET` request.
1765
-
1766
- **Parameters**: See `httpx.request`.
1767
- """
1768
- return await self.request(
1769
- "GET",
1770
- url,
1771
- params=params,
1772
- headers=headers,
1773
- cookies=cookies,
1774
- auth=auth,
1775
- follow_redirects=follow_redirects,
1776
- timeout=timeout,
1777
- extensions=extensions,
1778
- )
1779
-
1780
- async def options(
1781
- self,
1782
- url: URL | str,
1783
- *,
1784
- params: QueryParamTypes | None = None,
1785
- headers: HeaderTypes | None = None,
1786
- cookies: CookieTypes | None = None,
1787
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1788
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1789
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1790
- extensions: RequestExtensions | None = None,
1791
- ) -> Response:
1792
- """
1793
- Send an `OPTIONS` request.
1794
-
1795
- **Parameters**: See `httpx.request`.
1796
- """
1797
- return await self.request(
1798
- "OPTIONS",
1799
- url,
1800
- params=params,
1801
- headers=headers,
1802
- cookies=cookies,
1803
- auth=auth,
1804
- follow_redirects=follow_redirects,
1805
- timeout=timeout,
1806
- extensions=extensions,
1807
- )
1808
-
1809
- async def head(
1810
- self,
1811
- url: URL | str,
1812
- *,
1813
- params: QueryParamTypes | None = None,
1814
- headers: HeaderTypes | None = None,
1815
- cookies: CookieTypes | None = None,
1816
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1817
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1818
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1819
- extensions: RequestExtensions | None = None,
1820
- ) -> Response:
1821
- """
1822
- Send a `HEAD` request.
1823
-
1824
- **Parameters**: See `httpx.request`.
1825
- """
1826
- return await self.request(
1827
- "HEAD",
1828
- url,
1829
- params=params,
1830
- headers=headers,
1831
- cookies=cookies,
1832
- auth=auth,
1833
- follow_redirects=follow_redirects,
1834
- timeout=timeout,
1835
- extensions=extensions,
1836
- )
1837
-
1838
- async def post(
1839
- self,
1840
- url: URL | str,
1841
- *,
1842
- content: RequestContent | None = None,
1843
- data: RequestData | None = None,
1844
- files: RequestFiles | None = None,
1845
- json: typing.Any | None = None,
1846
- params: QueryParamTypes | None = None,
1847
- headers: HeaderTypes | None = None,
1848
- cookies: CookieTypes | None = None,
1849
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1850
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1851
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1852
- extensions: RequestExtensions | None = None,
1853
- ) -> Response:
1854
- """
1855
- Send a `POST` request.
1856
-
1857
- **Parameters**: See `httpx.request`.
1858
- """
1859
- return await self.request(
1860
- "POST",
1861
- url,
1862
- content=content,
1863
- data=data,
1864
- files=files,
1865
- json=json,
1866
- params=params,
1867
- headers=headers,
1868
- cookies=cookies,
1869
- auth=auth,
1870
- follow_redirects=follow_redirects,
1871
- timeout=timeout,
1872
- extensions=extensions,
1873
- )
1874
-
1875
- async def put(
1876
- self,
1877
- url: URL | str,
1878
- *,
1879
- content: RequestContent | None = None,
1880
- data: RequestData | None = None,
1881
- files: RequestFiles | None = None,
1882
- json: typing.Any | None = None,
1883
- params: QueryParamTypes | None = None,
1884
- headers: HeaderTypes | None = None,
1885
- cookies: CookieTypes | None = None,
1886
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1887
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1888
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1889
- extensions: RequestExtensions | None = None,
1890
- ) -> Response:
1891
- """
1892
- Send a `PUT` request.
1893
-
1894
- **Parameters**: See `httpx.request`.
1895
- """
1896
- return await self.request(
1897
- "PUT",
1898
- url,
1899
- content=content,
1900
- data=data,
1901
- files=files,
1902
- json=json,
1903
- params=params,
1904
- headers=headers,
1905
- cookies=cookies,
1906
- auth=auth,
1907
- follow_redirects=follow_redirects,
1908
- timeout=timeout,
1909
- extensions=extensions,
1910
- )
1911
-
1912
- async def patch(
1913
- self,
1914
- url: URL | str,
1915
- *,
1916
- content: RequestContent | None = None,
1917
- data: RequestData | None = None,
1918
- files: RequestFiles | None = None,
1919
- json: typing.Any | None = None,
1920
- params: QueryParamTypes | None = None,
1921
- headers: HeaderTypes | None = None,
1922
- cookies: CookieTypes | None = None,
1923
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1924
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1925
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1926
- extensions: RequestExtensions | None = None,
1927
- ) -> Response:
1928
- """
1929
- Send a `PATCH` request.
1930
-
1931
- **Parameters**: See `httpx.request`.
1932
- """
1933
- return await self.request(
1934
- "PATCH",
1935
- url,
1936
- content=content,
1937
- data=data,
1938
- files=files,
1939
- json=json,
1940
- params=params,
1941
- headers=headers,
1942
- cookies=cookies,
1943
- auth=auth,
1944
- follow_redirects=follow_redirects,
1945
- timeout=timeout,
1946
- extensions=extensions,
1947
- )
1948
-
1949
- async def delete(
1950
- self,
1951
- url: URL | str,
1952
- *,
1953
- params: QueryParamTypes | None = None,
1954
- headers: HeaderTypes | None = None,
1955
- cookies: CookieTypes | None = None,
1956
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1957
- follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1958
- timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1959
- extensions: RequestExtensions | None = None,
1960
- ) -> Response:
1961
- """
1962
- Send a `DELETE` request.
1963
-
1964
- **Parameters**: See `httpx.request`.
1965
- """
1966
- return await self.request(
1967
- "DELETE",
1968
- url,
1969
- params=params,
1970
- headers=headers,
1971
- cookies=cookies,
1972
- auth=auth,
1973
- follow_redirects=follow_redirects,
1974
- timeout=timeout,
1975
- extensions=extensions,
1976
- )
1977
-
1978
- async def aclose(self) -> None:
1979
- """
1980
- Close transport and proxies.
1981
- """
1982
- if self._state != ClientState.CLOSED:
1983
- self._state = ClientState.CLOSED
1984
-
1985
- await self._transport.aclose()
1986
- for proxy in self._mounts.values():
1987
- if proxy is not None:
1988
- await proxy.aclose()
1989
-
1990
- async def __aenter__(self: U) -> U:
1991
- if self._state != ClientState.UNOPENED:
1992
- msg = {
1993
- ClientState.OPENED: "Cannot open a client instance more than once.",
1994
- ClientState.CLOSED: (
1995
- "Cannot reopen a client instance, once it has been closed."
1996
- ),
1997
- }[self._state]
1998
- raise RuntimeError(msg)
1999
-
2000
- self._state = ClientState.OPENED
2001
-
2002
- await self._transport.__aenter__()
2003
- for proxy in self._mounts.values():
2004
- if proxy is not None:
2005
- await proxy.__aenter__()
2006
- return self
2007
-
2008
- async def __aexit__(
2009
- self,
2010
- exc_type: type[BaseException] | None = None,
2011
- exc_value: BaseException | None = None,
2012
- traceback: TracebackType | None = None,
2013
- ) -> None:
2014
- self._state = ClientState.CLOSED
2015
-
2016
- await self._transport.__aexit__(exc_type, exc_value, traceback)
2017
- for proxy in self._mounts.values():
2018
- if proxy is not None:
2019
- await proxy.__aexit__(exc_type, exc_value, traceback)
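`AsyncClient` mirrors the synchronous API with awaitable methods and `async with` lifecycle management, sharing one connection pool across concurrent tasks. A short sketch under the assumption of a couple of placeholder URLs:

```python
import asyncio

import httpx


async def fetch_all(urls: list[str]) -> list[int]:
    # A single AsyncClient shares its connection pool across concurrent tasks.
    async with httpx.AsyncClient(http2=False, follow_redirects=True) as client:
        responses = await asyncio.gather(*(client.get(url) for url in urls))
        return [r.status_code for r in responses]


if __name__ == "__main__":
    codes = asyncio.run(fetch_all(["https://example.org", "https://example.com"]))
    print(codes)
```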
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_config.py DELETED
@@ -1,248 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import os
4
- import typing
5
-
6
- from ._models import Headers
7
- from ._types import CertTypes, HeaderTypes, TimeoutTypes
8
- from ._urls import URL
9
-
10
- if typing.TYPE_CHECKING:
11
- import ssl # pragma: no cover
12
-
13
- __all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"]
14
-
15
-
16
- class UnsetType:
17
- pass # pragma: no cover
18
-
19
-
20
- UNSET = UnsetType()
21
-
22
-
23
- def create_ssl_context(
24
- verify: ssl.SSLContext | str | bool = True,
25
- cert: CertTypes | None = None,
26
- trust_env: bool = True,
27
- ) -> ssl.SSLContext:
28
- import ssl
29
- import warnings
30
-
31
- import certifi
32
-
33
- if verify is True:
34
- if trust_env and os.environ.get("SSL_CERT_FILE"): # pragma: nocover
35
- ctx = ssl.create_default_context(cafile=os.environ["SSL_CERT_FILE"])
36
- elif trust_env and os.environ.get("SSL_CERT_DIR"): # pragma: nocover
37
- ctx = ssl.create_default_context(capath=os.environ["SSL_CERT_DIR"])
38
- else:
39
- # Default case...
40
- ctx = ssl.create_default_context(cafile=certifi.where())
41
- elif verify is False:
42
- ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
43
- ctx.check_hostname = False
44
- ctx.verify_mode = ssl.CERT_NONE
45
- elif isinstance(verify, str): # pragma: nocover
46
- message = (
47
- "`verify=<str>` is deprecated. "
48
- "Use `verify=ssl.create_default_context(cafile=...)` "
49
- "or `verify=ssl.create_default_context(capath=...)` instead."
50
- )
51
- warnings.warn(message, DeprecationWarning)
52
- if os.path.isdir(verify):
53
- return ssl.create_default_context(capath=verify)
54
- return ssl.create_default_context(cafile=verify)
55
- else:
56
- ctx = verify
57
-
58
- if cert: # pragma: nocover
59
- message = (
60
- "`cert=...` is deprecated. Use `verify=<ssl_context>` instead,"
61
- "with `.load_cert_chain()` to configure the certificate chain."
62
- )
63
- warnings.warn(message, DeprecationWarning)
64
- if isinstance(cert, str):
65
- ctx.load_cert_chain(cert)
66
- else:
67
- ctx.load_cert_chain(*cert)
68
-
69
- return ctx
70
-
71
-
72
- class Timeout:
73
- """
74
- Timeout configuration.
75
-
76
- **Usage**:
77
-
78
- Timeout(None) # No timeouts.
79
- Timeout(5.0) # 5s timeout on all operations.
80
- Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts.
81
- Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere.
82
- Timeout(5.0, pool=None) # No timeout on acquiring connection from pool.
83
- # 5s timeout elsewhere.
84
- """
85
-
86
- def __init__(
87
- self,
88
- timeout: TimeoutTypes | UnsetType = UNSET,
89
- *,
90
- connect: None | float | UnsetType = UNSET,
91
- read: None | float | UnsetType = UNSET,
92
- write: None | float | UnsetType = UNSET,
93
- pool: None | float | UnsetType = UNSET,
94
- ) -> None:
95
- if isinstance(timeout, Timeout):
96
- # Passed as a single explicit Timeout.
97
- assert connect is UNSET
98
- assert read is UNSET
99
- assert write is UNSET
100
- assert pool is UNSET
101
- self.connect = timeout.connect # type: typing.Optional[float]
102
- self.read = timeout.read # type: typing.Optional[float]
103
- self.write = timeout.write # type: typing.Optional[float]
104
- self.pool = timeout.pool # type: typing.Optional[float]
105
- elif isinstance(timeout, tuple):
106
- # Passed as a tuple.
107
- self.connect = timeout[0]
108
- self.read = timeout[1]
109
- self.write = None if len(timeout) < 3 else timeout[2]
110
- self.pool = None if len(timeout) < 4 else timeout[3]
111
- elif not (
112
- isinstance(connect, UnsetType)
113
- or isinstance(read, UnsetType)
114
- or isinstance(write, UnsetType)
115
- or isinstance(pool, UnsetType)
116
- ):
117
- self.connect = connect
118
- self.read = read
119
- self.write = write
120
- self.pool = pool
121
- else:
122
- if isinstance(timeout, UnsetType):
123
- raise ValueError(
124
- "httpx.Timeout must either include a default, or set all "
125
- "four parameters explicitly."
126
- )
127
- self.connect = timeout if isinstance(connect, UnsetType) else connect
128
- self.read = timeout if isinstance(read, UnsetType) else read
129
- self.write = timeout if isinstance(write, UnsetType) else write
130
- self.pool = timeout if isinstance(pool, UnsetType) else pool
131
-
132
- def as_dict(self) -> dict[str, float | None]:
133
- return {
134
- "connect": self.connect,
135
- "read": self.read,
136
- "write": self.write,
137
- "pool": self.pool,
138
- }
139
-
140
- def __eq__(self, other: typing.Any) -> bool:
141
- return (
142
- isinstance(other, self.__class__)
143
- and self.connect == other.connect
144
- and self.read == other.read
145
- and self.write == other.write
146
- and self.pool == other.pool
147
- )
148
-
149
- def __repr__(self) -> str:
150
- class_name = self.__class__.__name__
151
- if len({self.connect, self.read, self.write, self.pool}) == 1:
152
- return f"{class_name}(timeout={self.connect})"
153
- return (
154
- f"{class_name}(connect={self.connect}, "
155
- f"read={self.read}, write={self.write}, pool={self.pool})"
156
- )
157
-
158
-
159
- class Limits:
160
- """
161
- Configuration for limits to various client behaviors.
162
-
163
- **Parameters:**
164
-
165
- * **max_connections** - The maximum number of concurrent connections that may be
166
- established.
167
- * **max_keepalive_connections** - Allow the connection pool to maintain
168
- keep-alive connections below this point. Should be less than or equal
169
- to `max_connections`.
170
- * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds.
171
- """
172
-
173
- def __init__(
174
- self,
175
- *,
176
- max_connections: int | None = None,
177
- max_keepalive_connections: int | None = None,
178
- keepalive_expiry: float | None = 5.0,
179
- ) -> None:
180
- self.max_connections = max_connections
181
- self.max_keepalive_connections = max_keepalive_connections
182
- self.keepalive_expiry = keepalive_expiry
183
-
184
- def __eq__(self, other: typing.Any) -> bool:
185
- return (
186
- isinstance(other, self.__class__)
187
- and self.max_connections == other.max_connections
188
- and self.max_keepalive_connections == other.max_keepalive_connections
189
- and self.keepalive_expiry == other.keepalive_expiry
190
- )
191
-
192
- def __repr__(self) -> str:
193
- class_name = self.__class__.__name__
194
- return (
195
- f"{class_name}(max_connections={self.max_connections}, "
196
- f"max_keepalive_connections={self.max_keepalive_connections}, "
197
- f"keepalive_expiry={self.keepalive_expiry})"
198
- )
199
-
200
-
201
- class Proxy:
202
- def __init__(
203
- self,
204
- url: URL | str,
205
- *,
206
- ssl_context: ssl.SSLContext | None = None,
207
- auth: tuple[str, str] | None = None,
208
- headers: HeaderTypes | None = None,
209
- ) -> None:
210
- url = URL(url)
211
- headers = Headers(headers)
212
-
213
- if url.scheme not in ("http", "https", "socks5", "socks5h"):
214
- raise ValueError(f"Unknown scheme for proxy URL {url!r}")
215
-
216
- if url.username or url.password:
217
- # Remove any auth credentials from the URL.
218
- auth = (url.username, url.password)
219
- url = url.copy_with(username=None, password=None)
220
-
221
- self.url = url
222
- self.auth = auth
223
- self.headers = headers
224
- self.ssl_context = ssl_context
225
-
226
- @property
227
- def raw_auth(self) -> tuple[bytes, bytes] | None:
228
- # The proxy authentication as raw bytes.
229
- return (
230
- None
231
- if self.auth is None
232
- else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8"))
233
- )
234
-
235
- def __repr__(self) -> str:
236
- # The authentication is represented with the password component masked.
237
- auth = (self.auth[0], "********") if self.auth else None
238
-
239
- # Build a nice concise representation.
240
- url_str = f"{str(self.url)!r}"
241
- auth_str = f", auth={auth!r}" if auth else ""
242
- headers_str = f", headers={dict(self.headers)!r}" if self.headers else ""
243
- return f"Proxy({url_str}{auth_str}{headers_str})"
244
-
245
-
246
- DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0)
247
- DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20)
248
- DEFAULT_MAX_REDIRECTS = 20
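`Timeout`, `Limits`, and `Proxy` above are plain configuration containers that the client constructors accept. A small sketch of how they compose; the proxy URL and pool sizes are illustrative values:

```python
import httpx

# 5s everywhere except a 10s connect budget, matching the Timeout docstring.
timeout = httpx.Timeout(5.0, connect=10.0)

# Cap the pool at 50 connections, 10 of which may be kept alive.
limits = httpx.Limits(max_connections=50, max_keepalive_connections=10)

# Route all traffic through an (illustrative) HTTP proxy.
proxy = httpx.Proxy("http://localhost:3128")

client = httpx.Client(timeout=timeout, limits=limits, proxy=proxy)
print(timeout)  # Timeout(connect=10.0, read=5.0, write=5.0, pool=5.0)
print(limits)   # Limits(max_connections=50, max_keepalive_connections=10, keepalive_expiry=5.0)
client.close()
```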
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_content.py DELETED
@@ -1,240 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import inspect
4
- import warnings
5
- from json import dumps as json_dumps
6
- from typing import (
7
- Any,
8
- AsyncIterable,
9
- AsyncIterator,
10
- Iterable,
11
- Iterator,
12
- Mapping,
13
- )
14
- from urllib.parse import urlencode
15
-
16
- from ._exceptions import StreamClosed, StreamConsumed
17
- from ._multipart import MultipartStream
18
- from ._types import (
19
- AsyncByteStream,
20
- RequestContent,
21
- RequestData,
22
- RequestFiles,
23
- ResponseContent,
24
- SyncByteStream,
25
- )
26
- from ._utils import peek_filelike_length, primitive_value_to_str
27
-
28
- __all__ = ["ByteStream"]
29
-
30
-
31
- class ByteStream(AsyncByteStream, SyncByteStream):
32
- def __init__(self, stream: bytes) -> None:
33
- self._stream = stream
34
-
35
- def __iter__(self) -> Iterator[bytes]:
36
- yield self._stream
37
-
38
- async def __aiter__(self) -> AsyncIterator[bytes]:
39
- yield self._stream
40
-
41
-
42
- class IteratorByteStream(SyncByteStream):
43
- CHUNK_SIZE = 65_536
44
-
45
- def __init__(self, stream: Iterable[bytes]) -> None:
46
- self._stream = stream
47
- self._is_stream_consumed = False
48
- self._is_generator = inspect.isgenerator(stream)
49
-
50
- def __iter__(self) -> Iterator[bytes]:
51
- if self._is_stream_consumed and self._is_generator:
52
- raise StreamConsumed()
53
-
54
- self._is_stream_consumed = True
55
- if hasattr(self._stream, "read"):
56
- # File-like interfaces should use 'read' directly.
57
- chunk = self._stream.read(self.CHUNK_SIZE)
58
- while chunk:
59
- yield chunk
60
- chunk = self._stream.read(self.CHUNK_SIZE)
61
- else:
62
- # Otherwise iterate.
63
- for part in self._stream:
64
- yield part
65
-
66
-
67
- class AsyncIteratorByteStream(AsyncByteStream):
68
- CHUNK_SIZE = 65_536
69
-
70
- def __init__(self, stream: AsyncIterable[bytes]) -> None:
71
- self._stream = stream
72
- self._is_stream_consumed = False
73
- self._is_generator = inspect.isasyncgen(stream)
74
-
75
- async def __aiter__(self) -> AsyncIterator[bytes]:
76
- if self._is_stream_consumed and self._is_generator:
77
- raise StreamConsumed()
78
-
79
- self._is_stream_consumed = True
80
- if hasattr(self._stream, "aread"):
81
- # File-like interfaces should use 'aread' directly.
82
- chunk = await self._stream.aread(self.CHUNK_SIZE)
83
- while chunk:
84
- yield chunk
85
- chunk = await self._stream.aread(self.CHUNK_SIZE)
86
- else:
87
- # Otherwise iterate.
88
- async for part in self._stream:
89
- yield part
90
-
91
-
92
- class UnattachedStream(AsyncByteStream, SyncByteStream):
93
- """
94
- If a request or response is serialized using pickle, then it is no longer
95
- attached to a stream for I/O purposes. Any stream operations should result
96
- in `httpx.StreamClosed`.
97
- """
98
-
99
- def __iter__(self) -> Iterator[bytes]:
100
- raise StreamClosed()
101
-
102
- async def __aiter__(self) -> AsyncIterator[bytes]:
103
- raise StreamClosed()
104
- yield b"" # pragma: no cover
105
-
106
-
107
- def encode_content(
108
- content: str | bytes | Iterable[bytes] | AsyncIterable[bytes],
109
- ) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
110
- if isinstance(content, (bytes, str)):
111
- body = content.encode("utf-8") if isinstance(content, str) else content
112
- content_length = len(body)
113
- headers = {"Content-Length": str(content_length)} if body else {}
114
- return headers, ByteStream(body)
115
-
116
- elif isinstance(content, Iterable) and not isinstance(content, dict):
117
- # `not isinstance(content, dict)` is a bit oddly specific, but it
118
- # catches a case that's easy for users to make in error, and would
119
- # otherwise pass through here, like any other bytes-iterable,
120
- # because `dict` happens to be iterable. See issue #2491.
121
- content_length_or_none = peek_filelike_length(content)
122
-
123
- if content_length_or_none is None:
124
- headers = {"Transfer-Encoding": "chunked"}
125
- else:
126
- headers = {"Content-Length": str(content_length_or_none)}
127
- return headers, IteratorByteStream(content) # type: ignore
128
-
129
- elif isinstance(content, AsyncIterable):
130
- headers = {"Transfer-Encoding": "chunked"}
131
- return headers, AsyncIteratorByteStream(content)
132
-
133
- raise TypeError(f"Unexpected type for 'content', {type(content)!r}")
134
-
135
-
136
- def encode_urlencoded_data(
137
- data: RequestData,
138
- ) -> tuple[dict[str, str], ByteStream]:
139
- plain_data = []
140
- for key, value in data.items():
141
- if isinstance(value, (list, tuple)):
142
- plain_data.extend([(key, primitive_value_to_str(item)) for item in value])
143
- else:
144
- plain_data.append((key, primitive_value_to_str(value)))
145
- body = urlencode(plain_data, doseq=True).encode("utf-8")
146
- content_length = str(len(body))
147
- content_type = "application/x-www-form-urlencoded"
148
- headers = {"Content-Length": content_length, "Content-Type": content_type}
149
- return headers, ByteStream(body)
150
-
151
-
152
- def encode_multipart_data(
153
- data: RequestData, files: RequestFiles, boundary: bytes | None
154
- ) -> tuple[dict[str, str], MultipartStream]:
155
- multipart = MultipartStream(data=data, files=files, boundary=boundary)
156
- headers = multipart.get_headers()
157
- return headers, multipart
158
-
159
-
160
- def encode_text(text: str) -> tuple[dict[str, str], ByteStream]:
161
- body = text.encode("utf-8")
162
- content_length = str(len(body))
163
- content_type = "text/plain; charset=utf-8"
164
- headers = {"Content-Length": content_length, "Content-Type": content_type}
165
- return headers, ByteStream(body)
166
-
167
-
168
- def encode_html(html: str) -> tuple[dict[str, str], ByteStream]:
169
- body = html.encode("utf-8")
170
- content_length = str(len(body))
171
- content_type = "text/html; charset=utf-8"
172
- headers = {"Content-Length": content_length, "Content-Type": content_type}
173
- return headers, ByteStream(body)
174
-
175
-
176
- def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]:
177
- body = json_dumps(
178
- json, ensure_ascii=False, separators=(",", ":"), allow_nan=False
179
- ).encode("utf-8")
180
- content_length = str(len(body))
181
- content_type = "application/json"
182
- headers = {"Content-Length": content_length, "Content-Type": content_type}
183
- return headers, ByteStream(body)
184
-
185
-
186
- def encode_request(
187
- content: RequestContent | None = None,
188
- data: RequestData | None = None,
189
- files: RequestFiles | None = None,
190
- json: Any | None = None,
191
- boundary: bytes | None = None,
192
- ) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
193
- """
194
- Handles encoding the given `content`, `data`, `files`, and `json`,
195
- returning a two-tuple of (<headers>, <stream>).
196
- """
197
- if data is not None and not isinstance(data, Mapping):
198
- # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
199
- # for raw request content, and `data=<form data>` for url encoded or
200
- # multipart form content.
201
- #
202
- # However for compat with requests, we *do* still support
203
- # `data=<bytes...>` usages. We deal with that case here, treating it
204
- # as if `content=<...>` had been supplied instead.
205
- message = "Use 'content=<...>' to upload raw bytes/text content."
206
- warnings.warn(message, DeprecationWarning, stacklevel=2)
207
- return encode_content(data)
208
-
209
- if content is not None:
210
- return encode_content(content)
211
- elif files:
212
- return encode_multipart_data(data or {}, files, boundary)
213
- elif data:
214
- return encode_urlencoded_data(data)
215
- elif json is not None:
216
- return encode_json(json)
217
-
218
- return {}, ByteStream(b"")
219
-
220
-
221
- def encode_response(
222
- content: ResponseContent | None = None,
223
- text: str | None = None,
224
- html: str | None = None,
225
- json: Any | None = None,
226
- ) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
227
- """
228
- Handles encoding the given `content`, returning a two-tuple of
229
- (<headers>, <stream>).
230
- """
231
- if content is not None:
232
- return encode_content(content)
233
- elif text is not None:
234
- return encode_text(text)
235
- elif html is not None:
236
- return encode_html(html)
237
- elif json is not None:
238
- return encode_json(json)
239
-
240
- return {}, ByteStream(b"")
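
The file above is httpx's private request/response body encoder: it maps the public content=/data=/files=/json= keyword arguments onto a (headers, byte-stream) pair. A minimal sketch of that behaviour, assuming an httpx install that still exposes the private httpx._content module the same way this vendored copy did:

# Sketch only: calls the deleted encode_request() helper directly; the private
# httpx._content import path is an assumption about the installed httpx version.
from httpx._content import encode_request

# Known-length bytes -> a Content-Length header plus an in-memory ByteStream.
headers, stream = encode_request(content=b"hello")
print(headers)              # {'Content-Length': '5'}
print(b"".join(stream))     # b'hello'

# Generator content has no known length -> Transfer-Encoding: chunked.
def chunks():
    yield b"part-1"
    yield b"part-2"

headers, stream = encode_request(content=chunks())
print(headers)              # {'Transfer-Encoding': 'chunked'}

# json=... is serialised with compact separators and a JSON content type.
headers, stream = encode_request(json={"a": 1})
print(headers["Content-Type"])   # application/json

In ordinary use these helpers are driven indirectly through httpx.Request(...) and client calls such as client.post(..., json=...) rather than invoked directly.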
 
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_decoders.py DELETED
@@ -1,393 +0,0 @@
1
- """
2
- Handlers for Content-Encoding.
3
-
4
- See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
5
- """
6
-
7
- from __future__ import annotations
8
-
9
- import codecs
10
- import io
11
- import typing
12
- import zlib
13
-
14
- from ._exceptions import DecodingError
15
-
16
- # Brotli support is optional
17
- try:
18
- # The C bindings in `brotli` are recommended for CPython.
19
- import brotli
20
- except ImportError: # pragma: no cover
21
- try:
22
- # The CFFI bindings in `brotlicffi` are recommended for PyPy
23
- # and other environments.
24
- import brotlicffi as brotli
25
- except ImportError:
26
- brotli = None
27
-
28
-
29
- # Zstandard support is optional
30
- try:
31
- import zstandard
32
- except ImportError: # pragma: no cover
33
- zstandard = None # type: ignore
34
-
35
-
36
- class ContentDecoder:
37
- def decode(self, data: bytes) -> bytes:
38
- raise NotImplementedError() # pragma: no cover
39
-
40
- def flush(self) -> bytes:
41
- raise NotImplementedError() # pragma: no cover
42
-
43
-
44
- class IdentityDecoder(ContentDecoder):
45
- """
46
- Handle unencoded data.
47
- """
48
-
49
- def decode(self, data: bytes) -> bytes:
50
- return data
51
-
52
- def flush(self) -> bytes:
53
- return b""
54
-
55
-
56
- class DeflateDecoder(ContentDecoder):
57
- """
58
- Handle 'deflate' decoding.
59
-
60
- See: https://stackoverflow.com/questions/1838699
61
- """
62
-
63
- def __init__(self) -> None:
64
- self.first_attempt = True
65
- self.decompressor = zlib.decompressobj()
66
-
67
- def decode(self, data: bytes) -> bytes:
68
- was_first_attempt = self.first_attempt
69
- self.first_attempt = False
70
- try:
71
- return self.decompressor.decompress(data)
72
- except zlib.error as exc:
73
- if was_first_attempt:
74
- self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
75
- return self.decode(data)
76
- raise DecodingError(str(exc)) from exc
77
-
78
- def flush(self) -> bytes:
79
- try:
80
- return self.decompressor.flush()
81
- except zlib.error as exc: # pragma: no cover
82
- raise DecodingError(str(exc)) from exc
83
-
84
-
85
- class GZipDecoder(ContentDecoder):
86
- """
87
- Handle 'gzip' decoding.
88
-
89
- See: https://stackoverflow.com/questions/1838699
90
- """
91
-
92
- def __init__(self) -> None:
93
- self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)
94
-
95
- def decode(self, data: bytes) -> bytes:
96
- try:
97
- return self.decompressor.decompress(data)
98
- except zlib.error as exc:
99
- raise DecodingError(str(exc)) from exc
100
-
101
- def flush(self) -> bytes:
102
- try:
103
- return self.decompressor.flush()
104
- except zlib.error as exc: # pragma: no cover
105
- raise DecodingError(str(exc)) from exc
106
-
107
-
108
- class BrotliDecoder(ContentDecoder):
109
- """
110
- Handle 'brotli' decoding.
111
-
112
- Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/
113
- or `pip install brotli`. See https://github.com/google/brotli
114
- Supports both 'brotlipy' and 'Brotli' packages since they share an import
115
- name. The top branches are for 'brotlipy' and bottom branches for 'Brotli'
116
- """
117
-
118
- def __init__(self) -> None:
119
- if brotli is None: # pragma: no cover
120
- raise ImportError(
121
- "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' "
122
- "packages have been installed. "
123
- "Make sure to install httpx using `pip install httpx[brotli]`."
124
- ) from None
125
-
126
- self.decompressor = brotli.Decompressor()
127
- self.seen_data = False
128
- self._decompress: typing.Callable[[bytes], bytes]
129
- if hasattr(self.decompressor, "decompress"):
130
- # The 'brotlicffi' package.
131
- self._decompress = self.decompressor.decompress # pragma: no cover
132
- else:
133
- # The 'brotli' package.
134
- self._decompress = self.decompressor.process # pragma: no cover
135
-
136
- def decode(self, data: bytes) -> bytes:
137
- if not data:
138
- return b""
139
- self.seen_data = True
140
- try:
141
- return self._decompress(data)
142
- except brotli.error as exc:
143
- raise DecodingError(str(exc)) from exc
144
-
145
- def flush(self) -> bytes:
146
- if not self.seen_data:
147
- return b""
148
- try:
149
- if hasattr(self.decompressor, "finish"):
150
- # Only available in the 'brotlicffi' package.
151
-
152
- # As the decompressor decompresses eagerly, this
153
- # will never actually emit any data. However, it will potentially throw
154
- # errors if a truncated or damaged data stream has been used.
155
- self.decompressor.finish() # pragma: no cover
156
- return b""
157
- except brotli.error as exc: # pragma: no cover
158
- raise DecodingError(str(exc)) from exc
159
-
160
-
161
- class ZStandardDecoder(ContentDecoder):
162
- """
163
- Handle 'zstd' RFC 8878 decoding.
164
-
165
- Requires `pip install zstandard`.
166
- Can be installed as a dependency of httpx using `pip install httpx[zstd]`.
167
- """
168
-
169
- # inspired by the ZstdDecoder implementation in urllib3
170
- def __init__(self) -> None:
171
- if zstandard is None: # pragma: no cover
172
- raise ImportError(
173
- "Using 'ZStandardDecoder', ..."
174
- "Make sure to install httpx using `pip install httpx[zstd]`."
175
- ) from None
176
-
177
- self.decompressor = zstandard.ZstdDecompressor().decompressobj()
178
- self.seen_data = False
179
-
180
- def decode(self, data: bytes) -> bytes:
181
- assert zstandard is not None
182
- self.seen_data = True
183
- output = io.BytesIO()
184
- try:
185
- output.write(self.decompressor.decompress(data))
186
- while self.decompressor.eof and self.decompressor.unused_data:
187
- unused_data = self.decompressor.unused_data
188
- self.decompressor = zstandard.ZstdDecompressor().decompressobj()
189
- output.write(self.decompressor.decompress(unused_data))
190
- except zstandard.ZstdError as exc:
191
- raise DecodingError(str(exc)) from exc
192
- return output.getvalue()
193
-
194
- def flush(self) -> bytes:
195
- if not self.seen_data:
196
- return b""
197
- ret = self.decompressor.flush() # note: this is a no-op
198
- if not self.decompressor.eof:
199
- raise DecodingError("Zstandard data is incomplete") # pragma: no cover
200
- return bytes(ret)
201
-
202
-
203
- class MultiDecoder(ContentDecoder):
204
- """
205
- Handle the case where multiple encodings have been applied.
206
- """
207
-
208
- def __init__(self, children: typing.Sequence[ContentDecoder]) -> None:
209
- """
210
- 'children' should be a sequence of decoders in the order in which
211
- each was applied.
212
- """
213
- # Note that we reverse the order for decoding.
214
- self.children = list(reversed(children))
215
-
216
- def decode(self, data: bytes) -> bytes:
217
- for child in self.children:
218
- data = child.decode(data)
219
- return data
220
-
221
- def flush(self) -> bytes:
222
- data = b""
223
- for child in self.children:
224
- data = child.decode(data) + child.flush()
225
- return data
226
-
227
-
228
- class ByteChunker:
229
- """
230
- Handles returning byte content in fixed-size chunks.
231
- """
232
-
233
- def __init__(self, chunk_size: int | None = None) -> None:
234
- self._buffer = io.BytesIO()
235
- self._chunk_size = chunk_size
236
-
237
- def decode(self, content: bytes) -> list[bytes]:
238
- if self._chunk_size is None:
239
- return [content] if content else []
240
-
241
- self._buffer.write(content)
242
- if self._buffer.tell() >= self._chunk_size:
243
- value = self._buffer.getvalue()
244
- chunks = [
245
- value[i : i + self._chunk_size]
246
- for i in range(0, len(value), self._chunk_size)
247
- ]
248
- if len(chunks[-1]) == self._chunk_size:
249
- self._buffer.seek(0)
250
- self._buffer.truncate()
251
- return chunks
252
- else:
253
- self._buffer.seek(0)
254
- self._buffer.write(chunks[-1])
255
- self._buffer.truncate()
256
- return chunks[:-1]
257
- else:
258
- return []
259
-
260
- def flush(self) -> list[bytes]:
261
- value = self._buffer.getvalue()
262
- self._buffer.seek(0)
263
- self._buffer.truncate()
264
- return [value] if value else []
265
-
266
-
267
- class TextChunker:
268
- """
269
- Handles returning text content in fixed-size chunks.
270
- """
271
-
272
- def __init__(self, chunk_size: int | None = None) -> None:
273
- self._buffer = io.StringIO()
274
- self._chunk_size = chunk_size
275
-
276
- def decode(self, content: str) -> list[str]:
277
- if self._chunk_size is None:
278
- return [content] if content else []
279
-
280
- self._buffer.write(content)
281
- if self._buffer.tell() >= self._chunk_size:
282
- value = self._buffer.getvalue()
283
- chunks = [
284
- value[i : i + self._chunk_size]
285
- for i in range(0, len(value), self._chunk_size)
286
- ]
287
- if len(chunks[-1]) == self._chunk_size:
288
- self._buffer.seek(0)
289
- self._buffer.truncate()
290
- return chunks
291
- else:
292
- self._buffer.seek(0)
293
- self._buffer.write(chunks[-1])
294
- self._buffer.truncate()
295
- return chunks[:-1]
296
- else:
297
- return []
298
-
299
- def flush(self) -> list[str]:
300
- value = self._buffer.getvalue()
301
- self._buffer.seek(0)
302
- self._buffer.truncate()
303
- return [value] if value else []
304
-
305
-
306
- class TextDecoder:
307
- """
308
- Handles incrementally decoding bytes into text
309
- """
310
-
311
- def __init__(self, encoding: str = "utf-8") -> None:
312
- self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
313
-
314
- def decode(self, data: bytes) -> str:
315
- return self.decoder.decode(data)
316
-
317
- def flush(self) -> str:
318
- return self.decoder.decode(b"", True)
319
-
320
-
321
- class LineDecoder:
322
- """
323
- Handles incrementally reading lines from text.
324
-
325
- Has the same behaviour as the stdlib splitlines,
326
- but handling the input iteratively.
327
- """
328
-
329
- def __init__(self) -> None:
330
- self.buffer: list[str] = []
331
- self.trailing_cr: bool = False
332
-
333
- def decode(self, text: str) -> list[str]:
334
- # See https://docs.python.org/3/library/stdtypes.html#str.splitlines
335
- NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"
336
-
337
- # We always push a trailing `\r` into the next decode iteration.
338
- if self.trailing_cr:
339
- text = "\r" + text
340
- self.trailing_cr = False
341
- if text.endswith("\r"):
342
- self.trailing_cr = True
343
- text = text[:-1]
344
-
345
- if not text:
346
- # NOTE: the edge case input of empty text doesn't occur in practice,
347
- # because other httpx internals filter out this value
348
- return [] # pragma: no cover
349
-
350
- trailing_newline = text[-1] in NEWLINE_CHARS
351
- lines = text.splitlines()
352
-
353
- if len(lines) == 1 and not trailing_newline:
354
- # No new lines, buffer the input and continue.
355
- self.buffer.append(lines[0])
356
- return []
357
-
358
- if self.buffer:
359
- # Include any existing buffer in the first portion of the
360
- # splitlines result.
361
- lines = ["".join(self.buffer) + lines[0]] + lines[1:]
362
- self.buffer = []
363
-
364
- if not trailing_newline:
365
- # If the last segment of splitlines is not newline terminated,
366
- # then drop it from our output and start a new buffer.
367
- self.buffer = [lines.pop()]
368
-
369
- return lines
370
-
371
- def flush(self) -> list[str]:
372
- if not self.buffer and not self.trailing_cr:
373
- return []
374
-
375
- lines = ["".join(self.buffer)]
376
- self.buffer = []
377
- self.trailing_cr = False
378
- return lines
379
-
380
-
381
- SUPPORTED_DECODERS = {
382
- "identity": IdentityDecoder,
383
- "gzip": GZipDecoder,
384
- "deflate": DeflateDecoder,
385
- "br": BrotliDecoder,
386
- "zstd": ZStandardDecoder,
387
- }
388
-
389
-
390
- if brotli is None:
391
- SUPPORTED_DECODERS.pop("br") # pragma: no cover
392
- if zstandard is None:
393
- SUPPORTED_DECODERS.pop("zstd") # pragma: no cover
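
The decoders module above implements the Content-Encoding handlers (gzip, deflate, optional brotli/zstd) plus the incremental chunking and line splitting used for response streaming. A small sketch of two of them, again assuming the private httpx._decoders module is importable as it was in this vendored copy:

# Sketch only: round-trips a gzip body and splits incremental text into lines;
# the httpx._decoders import path is an assumption about the installed version.
import gzip
from httpx._decoders import GZipDecoder, LineDecoder

decoder = GZipDecoder()
compressed = gzip.compress(b"hello world")
print(decoder.decode(compressed) + decoder.flush())   # b'hello world'

lines = LineDecoder()
print(lines.decode("first\nsec"))   # ['first']  ('sec' stays buffered)
print(lines.decode("ond\n"))        # ['second']
print(lines.flush())                # []

Within httpx these classes are selected from SUPPORTED_DECODERS based on the response's Content-Encoding header and are not normally instantiated by user code.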
 
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_exceptions.py DELETED
@@ -1,379 +0,0 @@
1
- """
2
- Our exception hierarchy:
3
-
4
- * HTTPError
5
- x RequestError
6
- + TransportError
7
- - TimeoutException
8
- · ConnectTimeout
9
- · ReadTimeout
10
- · WriteTimeout
11
- · PoolTimeout
12
- - NetworkError
13
- · ConnectError
14
- · ReadError
15
- · WriteError
16
- · CloseError
17
- - ProtocolError
18
- · LocalProtocolError
19
- · RemoteProtocolError
20
- - ProxyError
21
- - UnsupportedProtocol
22
- + DecodingError
23
- + TooManyRedirects
24
- x HTTPStatusError
25
- * InvalidURL
26
- * CookieConflict
27
- * StreamError
28
- x StreamConsumed
29
- x StreamClosed
30
- x ResponseNotRead
31
- x RequestNotRead
32
- """
33
-
34
- from __future__ import annotations
35
-
36
- import contextlib
37
- import typing
38
-
39
- if typing.TYPE_CHECKING:
40
- from ._models import Request, Response # pragma: no cover
41
-
42
- __all__ = [
43
- "CloseError",
44
- "ConnectError",
45
- "ConnectTimeout",
46
- "CookieConflict",
47
- "DecodingError",
48
- "HTTPError",
49
- "HTTPStatusError",
50
- "InvalidURL",
51
- "LocalProtocolError",
52
- "NetworkError",
53
- "PoolTimeout",
54
- "ProtocolError",
55
- "ProxyError",
56
- "ReadError",
57
- "ReadTimeout",
58
- "RemoteProtocolError",
59
- "RequestError",
60
- "RequestNotRead",
61
- "ResponseNotRead",
62
- "StreamClosed",
63
- "StreamConsumed",
64
- "StreamError",
65
- "TimeoutException",
66
- "TooManyRedirects",
67
- "TransportError",
68
- "UnsupportedProtocol",
69
- "WriteError",
70
- "WriteTimeout",
71
- ]
72
-
73
-
74
- class HTTPError(Exception):
75
- """
76
- Base class for `RequestError` and `HTTPStatusError`.
77
-
78
- Useful for `try...except` blocks when issuing a request,
79
- and then calling `.raise_for_status()`.
80
-
81
- For example:
82
-
83
- ```
84
- try:
85
- response = httpx.get("https://www.example.com")
86
- response.raise_for_status()
87
- except httpx.HTTPError as exc:
88
- print(f"HTTP Exception for {exc.request.url} - {exc}")
89
- ```
90
- """
91
-
92
- def __init__(self, message: str) -> None:
93
- super().__init__(message)
94
- self._request: Request | None = None
95
-
96
- @property
97
- def request(self) -> Request:
98
- if self._request is None:
99
- raise RuntimeError("The .request property has not been set.")
100
- return self._request
101
-
102
- @request.setter
103
- def request(self, request: Request) -> None:
104
- self._request = request
105
-
106
-
107
- class RequestError(HTTPError):
108
- """
109
- Base class for all exceptions that may occur when issuing a `.request()`.
110
- """
111
-
112
- def __init__(self, message: str, *, request: Request | None = None) -> None:
113
- super().__init__(message)
114
- # At the point an exception is raised we won't typically have a request
115
- # instance to associate it with.
116
- #
117
- # The 'request_context' context manager is used within the Client and
118
- # Response methods in order to ensure that any raised exceptions
119
- # have a `.request` property set on them.
120
- self._request = request
121
-
122
-
123
- class TransportError(RequestError):
124
- """
125
- Base class for all exceptions that occur at the level of the Transport API.
126
- """
127
-
128
-
129
- # Timeout exceptions...
130
-
131
-
132
- class TimeoutException(TransportError):
133
- """
134
- The base class for timeout errors.
135
-
136
- An operation has timed out.
137
- """
138
-
139
-
140
- class ConnectTimeout(TimeoutException):
141
- """
142
- Timed out while connecting to the host.
143
- """
144
-
145
-
146
- class ReadTimeout(TimeoutException):
147
- """
148
- Timed out while receiving data from the host.
149
- """
150
-
151
-
152
- class WriteTimeout(TimeoutException):
153
- """
154
- Timed out while sending data to the host.
155
- """
156
-
157
-
158
- class PoolTimeout(TimeoutException):
159
- """
160
- Timed out waiting to acquire a connection from the pool.
161
- """
162
-
163
-
164
- # Core networking exceptions...
165
-
166
-
167
- class NetworkError(TransportError):
168
- """
169
- The base class for network-related errors.
170
-
171
- An error occurred while interacting with the network.
172
- """
173
-
174
-
175
- class ReadError(NetworkError):
176
- """
177
- Failed to receive data from the network.
178
- """
179
-
180
-
181
- class WriteError(NetworkError):
182
- """
183
- Failed to send data through the network.
184
- """
185
-
186
-
187
- class ConnectError(NetworkError):
188
- """
189
- Failed to establish a connection.
190
- """
191
-
192
-
193
- class CloseError(NetworkError):
194
- """
195
- Failed to close a connection.
196
- """
197
-
198
-
199
- # Other transport exceptions...
200
-
201
-
202
- class ProxyError(TransportError):
203
- """
204
- An error occurred while establishing a proxy connection.
205
- """
206
-
207
-
208
- class UnsupportedProtocol(TransportError):
209
- """
210
- Attempted to make a request to an unsupported protocol.
211
-
212
- For example issuing a request to `ftp://www.example.com`.
213
- """
214
-
215
-
216
- class ProtocolError(TransportError):
217
- """
218
- The protocol was violated.
219
- """
220
-
221
-
222
- class LocalProtocolError(ProtocolError):
223
- """
224
- A protocol was violated by the client.
225
-
226
- For example if the user instantiated a `Request` instance explicitly,
227
- failed to include the mandatory `Host:` header, and then issued it directly
228
- using `client.send()`.
229
- """
230
-
231
-
232
- class RemoteProtocolError(ProtocolError):
233
- """
234
- The protocol was violated by the server.
235
-
236
- For example, returning malformed HTTP.
237
- """
238
-
239
-
240
- # Other request exceptions...
241
-
242
-
243
- class DecodingError(RequestError):
244
- """
245
- Decoding of the response failed, due to a malformed encoding.
246
- """
247
-
248
-
249
- class TooManyRedirects(RequestError):
250
- """
251
- Too many redirects.
252
- """
253
-
254
-
255
- # Client errors
256
-
257
-
258
- class HTTPStatusError(HTTPError):
259
- """
260
- The response had an error HTTP status of 4xx or 5xx.
261
-
262
- May be raised when calling `response.raise_for_status()`
263
- """
264
-
265
- def __init__(self, message: str, *, request: Request, response: Response) -> None:
266
- super().__init__(message)
267
- self.request = request
268
- self.response = response
269
-
270
-
271
- class InvalidURL(Exception):
272
- """
273
- URL is improperly formed or cannot be parsed.
274
- """
275
-
276
- def __init__(self, message: str) -> None:
277
- super().__init__(message)
278
-
279
-
280
- class CookieConflict(Exception):
281
- """
282
- Attempted to lookup a cookie by name, but multiple cookies existed.
283
-
284
- Can occur when calling `response.cookies.get(...)`.
285
- """
286
-
287
- def __init__(self, message: str) -> None:
288
- super().__init__(message)
289
-
290
-
291
- # Stream exceptions...
292
-
293
- # These may occur as the result of a programming error, by accessing
294
- # the request/response stream in an invalid manner.
295
-
296
-
297
- class StreamError(RuntimeError):
298
- """
299
- The base class for stream exceptions.
300
-
301
- The developer made an error in accessing the request stream in
302
- an invalid way.
303
- """
304
-
305
- def __init__(self, message: str) -> None:
306
- super().__init__(message)
307
-
308
-
309
- class StreamConsumed(StreamError):
310
- """
311
- Attempted to read or stream content, but the content has already
312
- been streamed.
313
- """
314
-
315
- def __init__(self) -> None:
316
- message = (
317
- "Attempted to read or stream some content, but the content has "
318
- "already been streamed. For requests, this could be due to passing "
319
- "a generator as request content, and then receiving a redirect "
320
- "response or a secondary request as part of an authentication flow."
321
- "For responses, this could be due to attempting to stream the response "
322
- "content more than once."
323
- )
324
- super().__init__(message)
325
-
326
-
327
- class StreamClosed(StreamError):
328
- """
329
- Attempted to read or stream response content, but the request has been
330
- closed.
331
- """
332
-
333
- def __init__(self) -> None:
334
- message = (
335
- "Attempted to read or stream content, but the stream has " "been closed."
336
- )
337
- super().__init__(message)
338
-
339
-
340
- class ResponseNotRead(StreamError):
341
- """
342
- Attempted to access streaming response content, without having called `read()`.
343
- """
344
-
345
- def __init__(self) -> None:
346
- message = (
347
- "Attempted to access streaming response content,"
348
- " without having called `read()`."
349
- )
350
- super().__init__(message)
351
-
352
-
353
- class RequestNotRead(StreamError):
354
- """
355
- Attempted to access streaming request content, without having called `read()`.
356
- """
357
-
358
- def __init__(self) -> None:
359
- message = (
360
- "Attempted to access streaming request content,"
361
- " without having called `read()`."
362
- )
363
- super().__init__(message)
364
-
365
-
366
- @contextlib.contextmanager
367
- def request_context(
368
- request: Request | None = None,
369
- ) -> typing.Iterator[None]:
370
- """
371
- A context manager that can be used to attach the given request context
372
- to any `RequestError` exceptions that are raised within the block.
373
- """
374
- try:
375
- yield
376
- except RequestError as exc:
377
- if request is not None:
378
- exc.request = request
379
- raise exc
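
The exceptions module above defines the public error hierarchy laid out in its leading docstring. The usual consumption pattern distinguishes transport failures (RequestError) from 4xx/5xx responses (HTTPStatusError); a short sketch using only public httpx API, with a deliberately unreachable placeholder URL:

import httpx

try:
    # Placeholder host: any unreachable address exercises the RequestError branch.
    response = httpx.get("https://example.invalid/")
    response.raise_for_status()
except httpx.RequestError as exc:
    # Transport-level failures: DNS, timeouts, TLS, protocol violations, ...
    print(f"Request to {exc.request.url} failed: {exc!r}")
except httpx.HTTPStatusError as exc:
    # The request completed, but returned an error status code.
    print(f"{exc.response.status_code} from {exc.request.url}")

Catching httpx.HTTPError covers both branches at once, as the HTTPError docstring in the deleted file notes.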
 
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_main.py DELETED
@@ -1,506 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import functools
4
- import json
5
- import sys
6
- import typing
7
-
8
- import click
9
- import pygments.lexers
10
- import pygments.util
11
- import rich.console
12
- import rich.markup
13
- import rich.progress
14
- import rich.syntax
15
- import rich.table
16
-
17
- from ._client import Client
18
- from ._exceptions import RequestError
19
- from ._models import Response
20
- from ._status_codes import codes
21
-
22
- if typing.TYPE_CHECKING:
23
- import httpcore # pragma: no cover
24
-
25
-
26
- def print_help() -> None:
27
- console = rich.console.Console()
28
-
29
- console.print("[bold]HTTPX :butterfly:", justify="center")
30
- console.print()
31
- console.print("A next generation HTTP client.", justify="center")
32
- console.print()
33
- console.print(
34
- "Usage: [bold]httpx[/bold] [cyan]<URL> [OPTIONS][/cyan] ", justify="left"
35
- )
36
- console.print()
37
-
38
- table = rich.table.Table.grid(padding=1, pad_edge=True)
39
- table.add_column("Parameter", no_wrap=True, justify="left", style="bold")
40
- table.add_column("Description")
41
- table.add_row(
42
- "-m, --method [cyan]METHOD",
43
- "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n"
44
- "[Default: GET, or POST if a request body is included]",
45
- )
46
- table.add_row(
47
- "-p, --params [cyan]<NAME VALUE> ...",
48
- "Query parameters to include in the request URL.",
49
- )
50
- table.add_row(
51
- "-c, --content [cyan]TEXT", "Byte content to include in the request body."
52
- )
53
- table.add_row(
54
- "-d, --data [cyan]<NAME VALUE> ...", "Form data to include in the request body."
55
- )
56
- table.add_row(
57
- "-f, --files [cyan]<NAME FILENAME> ...",
58
- "Form files to include in the request body.",
59
- )
60
- table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.")
61
- table.add_row(
62
- "-h, --headers [cyan]<NAME VALUE> ...",
63
- "Include additional HTTP headers in the request.",
64
- )
65
- table.add_row(
66
- "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request."
67
- )
68
- table.add_row(
69
- "--auth [cyan]<USER PASS>",
70
- "Username and password to include in the request. Specify '-' for the password"
71
- " to use a password prompt. Note that using --verbose/-v will expose"
72
- " the Authorization header, including the password encoding"
73
- " in a trivially reversible format.",
74
- )
75
-
76
- table.add_row(
77
- "--proxy [cyan]URL",
78
- "Send the request via a proxy. Should be the URL giving the proxy address.",
79
- )
80
-
81
- table.add_row(
82
- "--timeout [cyan]FLOAT",
83
- "Timeout value to use for network operations, such as establishing the"
84
- " connection, reading some data, etc... [Default: 5.0]",
85
- )
86
-
87
- table.add_row("--follow-redirects", "Automatically follow redirects.")
88
- table.add_row("--no-verify", "Disable SSL verification.")
89
- table.add_row(
90
- "--http2", "Send the request using HTTP/2, if the remote server supports it."
91
- )
92
-
93
- table.add_row(
94
- "--download [cyan]FILE",
95
- "Save the response content as a file, rather than displaying it.",
96
- )
97
-
98
- table.add_row("-v, --verbose", "Verbose output. Show request as well as response.")
99
- table.add_row("--help", "Show this message and exit.")
100
- console.print(table)
101
-
102
-
103
- def get_lexer_for_response(response: Response) -> str:
104
- content_type = response.headers.get("Content-Type")
105
- if content_type is not None:
106
- mime_type, _, _ = content_type.partition(";")
107
- try:
108
- return typing.cast(
109
- str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name
110
- )
111
- except pygments.util.ClassNotFound: # pragma: no cover
112
- pass
113
- return "" # pragma: no cover
114
-
115
-
116
- def format_request_headers(request: httpcore.Request, http2: bool = False) -> str:
117
- version = "HTTP/2" if http2 else "HTTP/1.1"
118
- headers = [
119
- (name.lower() if http2 else name, value) for name, value in request.headers
120
- ]
121
- method = request.method.decode("ascii")
122
- target = request.url.target.decode("ascii")
123
- lines = [f"{method} {target} {version}"] + [
124
- f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers
125
- ]
126
- return "\n".join(lines)
127
-
128
-
129
- def format_response_headers(
130
- http_version: bytes,
131
- status: int,
132
- reason_phrase: bytes | None,
133
- headers: list[tuple[bytes, bytes]],
134
- ) -> str:
135
- version = http_version.decode("ascii")
136
- reason = (
137
- codes.get_reason_phrase(status)
138
- if reason_phrase is None
139
- else reason_phrase.decode("ascii")
140
- )
141
- lines = [f"{version} {status} {reason}"] + [
142
- f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers
143
- ]
144
- return "\n".join(lines)
145
-
146
-
147
- def print_request_headers(request: httpcore.Request, http2: bool = False) -> None:
148
- console = rich.console.Console()
149
- http_text = format_request_headers(request, http2=http2)
150
- syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True)
151
- console.print(syntax)
152
- syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True)
153
- console.print(syntax)
154
-
155
-
156
- def print_response_headers(
157
- http_version: bytes,
158
- status: int,
159
- reason_phrase: bytes | None,
160
- headers: list[tuple[bytes, bytes]],
161
- ) -> None:
162
- console = rich.console.Console()
163
- http_text = format_response_headers(http_version, status, reason_phrase, headers)
164
- syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True)
165
- console.print(syntax)
166
- syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True)
167
- console.print(syntax)
168
-
169
-
170
- def print_response(response: Response) -> None:
171
- console = rich.console.Console()
172
- lexer_name = get_lexer_for_response(response)
173
- if lexer_name:
174
- if lexer_name.lower() == "json":
175
- try:
176
- data = response.json()
177
- text = json.dumps(data, indent=4)
178
- except ValueError: # pragma: no cover
179
- text = response.text
180
- else:
181
- text = response.text
182
-
183
- syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True)
184
- console.print(syntax)
185
- else:
186
- console.print(f"<{len(response.content)} bytes of binary data>")
187
-
188
-
189
- _PCTRTT = typing.Tuple[typing.Tuple[str, str], ...]
190
- _PCTRTTT = typing.Tuple[_PCTRTT, ...]
191
- _PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]]
192
-
193
-
194
- def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover
195
- lines = []
196
- for key, value in cert.items():
197
- if isinstance(value, (list, tuple)):
198
- lines.append(f"* {key}:")
199
- for item in value:
200
- if key in ("subject", "issuer"):
201
- for sub_item in item:
202
- lines.append(f"* {sub_item[0]}: {sub_item[1]!r}")
203
- elif isinstance(item, tuple) and len(item) == 2:
204
- lines.append(f"* {item[0]}: {item[1]!r}")
205
- else:
206
- lines.append(f"* {item!r}")
207
- else:
208
- lines.append(f"* {key}: {value!r}")
209
- return "\n".join(lines)
210
-
211
-
212
- def trace(
213
- name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False
214
- ) -> None:
215
- console = rich.console.Console()
216
- if name == "connection.connect_tcp.started" and verbose:
217
- host = info["host"]
218
- console.print(f"* Connecting to {host!r}")
219
- elif name == "connection.connect_tcp.complete" and verbose:
220
- stream = info["return_value"]
221
- server_addr = stream.get_extra_info("server_addr")
222
- console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}")
223
- elif name == "connection.start_tls.complete" and verbose: # pragma: no cover
224
- stream = info["return_value"]
225
- ssl_object = stream.get_extra_info("ssl_object")
226
- version = ssl_object.version()
227
- cipher = ssl_object.cipher()
228
- server_cert = ssl_object.getpeercert()
229
- alpn = ssl_object.selected_alpn_protocol()
230
- console.print(f"* SSL established using {version!r} / {cipher[0]!r}")
231
- console.print(f"* Selected ALPN protocol: {alpn!r}")
232
- if server_cert:
233
- console.print("* Server certificate:")
234
- console.print(format_certificate(server_cert))
235
- elif name == "http11.send_request_headers.started" and verbose:
236
- request = info["request"]
237
- print_request_headers(request, http2=False)
238
- elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover
239
- request = info["request"]
240
- print_request_headers(request, http2=True)
241
- elif name == "http11.receive_response_headers.complete":
242
- http_version, status, reason_phrase, headers = info["return_value"]
243
- print_response_headers(http_version, status, reason_phrase, headers)
244
- elif name == "http2.receive_response_headers.complete": # pragma: no cover
245
- status, headers = info["return_value"]
246
- http_version = b"HTTP/2"
247
- reason_phrase = None
248
- print_response_headers(http_version, status, reason_phrase, headers)
249
-
250
-
251
- def download_response(response: Response, download: typing.BinaryIO) -> None:
252
- console = rich.console.Console()
253
- console.print()
254
- content_length = response.headers.get("Content-Length")
255
- with rich.progress.Progress(
256
- "[progress.description]{task.description}",
257
- "[progress.percentage]{task.percentage:>3.0f}%",
258
- rich.progress.BarColumn(bar_width=None),
259
- rich.progress.DownloadColumn(),
260
- rich.progress.TransferSpeedColumn(),
261
- ) as progress:
262
- description = f"Downloading [bold]{rich.markup.escape(download.name)}"
263
- download_task = progress.add_task(
264
- description,
265
- total=int(content_length or 0),
266
- start=content_length is not None,
267
- )
268
- for chunk in response.iter_bytes():
269
- download.write(chunk)
270
- progress.update(download_task, completed=response.num_bytes_downloaded)
271
-
272
-
273
- def validate_json(
274
- ctx: click.Context,
275
- param: click.Option | click.Parameter,
276
- value: typing.Any,
277
- ) -> typing.Any:
278
- if value is None:
279
- return None
280
-
281
- try:
282
- return json.loads(value)
283
- except json.JSONDecodeError: # pragma: no cover
284
- raise click.BadParameter("Not valid JSON")
285
-
286
-
287
- def validate_auth(
288
- ctx: click.Context,
289
- param: click.Option | click.Parameter,
290
- value: typing.Any,
291
- ) -> typing.Any:
292
- if value == (None, None):
293
- return None
294
-
295
- username, password = value
296
- if password == "-": # pragma: no cover
297
- password = click.prompt("Password", hide_input=True)
298
- return (username, password)
299
-
300
-
301
- def handle_help(
302
- ctx: click.Context,
303
- param: click.Option | click.Parameter,
304
- value: typing.Any,
305
- ) -> None:
306
- if not value or ctx.resilient_parsing:
307
- return
308
-
309
- print_help()
310
- ctx.exit()
311
-
312
-
313
- @click.command(add_help_option=False)
314
- @click.argument("url", type=str)
315
- @click.option(
316
- "--method",
317
- "-m",
318
- "method",
319
- type=str,
320
- help=(
321
- "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. "
322
- "[Default: GET, or POST if a request body is included]"
323
- ),
324
- )
325
- @click.option(
326
- "--params",
327
- "-p",
328
- "params",
329
- type=(str, str),
330
- multiple=True,
331
- help="Query parameters to include in the request URL.",
332
- )
333
- @click.option(
334
- "--content",
335
- "-c",
336
- "content",
337
- type=str,
338
- help="Byte content to include in the request body.",
339
- )
340
- @click.option(
341
- "--data",
342
- "-d",
343
- "data",
344
- type=(str, str),
345
- multiple=True,
346
- help="Form data to include in the request body.",
347
- )
348
- @click.option(
349
- "--files",
350
- "-f",
351
- "files",
352
- type=(str, click.File(mode="rb")),
353
- multiple=True,
354
- help="Form files to include in the request body.",
355
- )
356
- @click.option(
357
- "--json",
358
- "-j",
359
- "json",
360
- type=str,
361
- callback=validate_json,
362
- help="JSON data to include in the request body.",
363
- )
364
- @click.option(
365
- "--headers",
366
- "-h",
367
- "headers",
368
- type=(str, str),
369
- multiple=True,
370
- help="Include additional HTTP headers in the request.",
371
- )
372
- @click.option(
373
- "--cookies",
374
- "cookies",
375
- type=(str, str),
376
- multiple=True,
377
- help="Cookies to include in the request.",
378
- )
379
- @click.option(
380
- "--auth",
381
- "auth",
382
- type=(str, str),
383
- default=(None, None),
384
- callback=validate_auth,
385
- help=(
386
- "Username and password to include in the request. "
387
- "Specify '-' for the password to use a password prompt. "
388
- "Note that using --verbose/-v will expose the Authorization header, "
389
- "including the password encoding in a trivially reversible format."
390
- ),
391
- )
392
- @click.option(
393
- "--proxy",
394
- "proxy",
395
- type=str,
396
- default=None,
397
- help="Send the request via a proxy. Should be the URL giving the proxy address.",
398
- )
399
- @click.option(
400
- "--timeout",
401
- "timeout",
402
- type=float,
403
- default=5.0,
404
- help=(
405
- "Timeout value to use for network operations, such as establishing the "
406
- "connection, reading some data, etc... [Default: 5.0]"
407
- ),
408
- )
409
- @click.option(
410
- "--follow-redirects",
411
- "follow_redirects",
412
- is_flag=True,
413
- default=False,
414
- help="Automatically follow redirects.",
415
- )
416
- @click.option(
417
- "--no-verify",
418
- "verify",
419
- is_flag=True,
420
- default=True,
421
- help="Disable SSL verification.",
422
- )
423
- @click.option(
424
- "--http2",
425
- "http2",
426
- type=bool,
427
- is_flag=True,
428
- default=False,
429
- help="Send the request using HTTP/2, if the remote server supports it.",
430
- )
431
- @click.option(
432
- "--download",
433
- type=click.File("wb"),
434
- help="Save the response content as a file, rather than displaying it.",
435
- )
436
- @click.option(
437
- "--verbose",
438
- "-v",
439
- type=bool,
440
- is_flag=True,
441
- default=False,
442
- help="Verbose. Show request as well as response.",
443
- )
444
- @click.option(
445
- "--help",
446
- is_flag=True,
447
- is_eager=True,
448
- expose_value=False,
449
- callback=handle_help,
450
- help="Show this message and exit.",
451
- )
452
- def main(
453
- url: str,
454
- method: str,
455
- params: list[tuple[str, str]],
456
- content: str,
457
- data: list[tuple[str, str]],
458
- files: list[tuple[str, click.File]],
459
- json: str,
460
- headers: list[tuple[str, str]],
461
- cookies: list[tuple[str, str]],
462
- auth: tuple[str, str] | None,
463
- proxy: str,
464
- timeout: float,
465
- follow_redirects: bool,
466
- verify: bool,
467
- http2: bool,
468
- download: typing.BinaryIO | None,
469
- verbose: bool,
470
- ) -> None:
471
- """
472
- An HTTP command line client.
473
- Sends a request and displays the response.
474
- """
475
- if not method:
476
- method = "POST" if content or data or files or json else "GET"
477
-
478
- try:
479
- with Client(proxy=proxy, timeout=timeout, http2=http2, verify=verify) as client:
480
- with client.stream(
481
- method,
482
- url,
483
- params=list(params),
484
- content=content,
485
- data=dict(data),
486
- files=files, # type: ignore
487
- json=json,
488
- headers=headers,
489
- cookies=dict(cookies),
490
- auth=auth,
491
- follow_redirects=follow_redirects,
492
- extensions={"trace": functools.partial(trace, verbose=verbose)},
493
- ) as response:
494
- if download is not None:
495
- download_response(response, download)
496
- else:
497
- response.read()
498
- if response.content:
499
- print_response(response)
500
-
501
- except RequestError as exc:
502
- console = rich.console.Console()
503
- console.print(f"[red]{type(exc).__name__}[/red]: {exc}")
504
- sys.exit(1)
505
-
506
- sys.exit(0 if response.is_success else 1)
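
The module above backs the httpx console command (installed with the httpx[cli] extra), equivalent to running e.g. httpx https://example.org -v from a shell. Because main() is an ordinary click command, it can also be driven in-process; a sketch assuming the click, rich and pygments dependencies are installed and that network access is available:

# Sketch only: invokes the deleted CLI entry point through click's test runner.
# Assumes the httpx[cli] extras and a reachable example.org; adjust as needed.
from click.testing import CliRunner
from httpx._main import main

runner = CliRunner()
result = runner.invoke(main, ["https://example.org", "--method", "HEAD", "--verbose"])
print(result.exit_code)
print(result.output)

runner.invoke() traps the sys.exit() calls at the end of main(), so the exit status is reported on the result object rather than terminating the interpreter.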
 
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_models.py DELETED
@@ -1,1277 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import codecs
4
- import datetime
5
- import email.message
6
- import json as jsonlib
7
- import re
8
- import typing
9
- import urllib.request
10
- from collections.abc import Mapping
11
- from http.cookiejar import Cookie, CookieJar
12
-
13
- from ._content import ByteStream, UnattachedStream, encode_request, encode_response
14
- from ._decoders import (
15
- SUPPORTED_DECODERS,
16
- ByteChunker,
17
- ContentDecoder,
18
- IdentityDecoder,
19
- LineDecoder,
20
- MultiDecoder,
21
- TextChunker,
22
- TextDecoder,
23
- )
24
- from ._exceptions import (
25
- CookieConflict,
26
- HTTPStatusError,
27
- RequestNotRead,
28
- ResponseNotRead,
29
- StreamClosed,
30
- StreamConsumed,
31
- request_context,
32
- )
33
- from ._multipart import get_multipart_boundary_from_content_type
34
- from ._status_codes import codes
35
- from ._types import (
36
- AsyncByteStream,
37
- CookieTypes,
38
- HeaderTypes,
39
- QueryParamTypes,
40
- RequestContent,
41
- RequestData,
42
- RequestExtensions,
43
- RequestFiles,
44
- ResponseContent,
45
- ResponseExtensions,
46
- SyncByteStream,
47
- )
48
- from ._urls import URL
49
- from ._utils import to_bytes_or_str, to_str
50
-
51
- __all__ = ["Cookies", "Headers", "Request", "Response"]
52
-
53
- SENSITIVE_HEADERS = {"authorization", "proxy-authorization"}
54
-
55
-
56
- def _is_known_encoding(encoding: str) -> bool:
57
- """
58
- Return `True` if `encoding` is a known codec.
59
- """
60
- try:
61
- codecs.lookup(encoding)
62
- except LookupError:
63
- return False
64
- return True
65
-
66
-
67
- def _normalize_header_key(key: str | bytes, encoding: str | None = None) -> bytes:
68
- """
69
- Coerce str/bytes into a strictly byte-wise HTTP header key.
70
- """
71
- return key if isinstance(key, bytes) else key.encode(encoding or "ascii")
72
-
73
-
74
- def _normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes:
75
- """
76
- Coerce str/bytes into a strictly byte-wise HTTP header value.
77
- """
78
- if isinstance(value, bytes):
79
- return value
80
- if not isinstance(value, str):
81
- raise TypeError(f"Header value must be str or bytes, not {type(value)}")
82
- return value.encode(encoding or "ascii")
83
-
84
-
85
- def _parse_content_type_charset(content_type: str) -> str | None:
86
- # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery.
87
- # See: https://peps.python.org/pep-0594/#cgi
88
- msg = email.message.Message()
89
- msg["content-type"] = content_type
90
- return msg.get_content_charset(failobj=None)
91
-
92
-
93
- def _parse_header_links(value: str) -> list[dict[str, str]]:
94
- """
95
- Returns a list of parsed link headers, for more info see:
96
- https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link
97
- The generic syntax of those is:
98
- Link: < uri-reference >; param1=value1; param2="value2"
99
- So for instance:
100
- Link; '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;'
101
- would return
102
- [
103
- {"url": "http:/.../front.jpeg", "type": "image/jpeg"},
104
- {"url": "http://.../back.jpeg"},
105
- ]
106
- :param value: HTTP Link entity-header field
107
- :return: list of parsed link headers
108
- """
109
- links: list[dict[str, str]] = []
110
- replace_chars = " '\""
111
- value = value.strip(replace_chars)
112
- if not value:
113
- return links
114
- for val in re.split(", *<", value):
115
- try:
116
- url, params = val.split(";", 1)
117
- except ValueError:
118
- url, params = val, ""
119
- link = {"url": url.strip("<> '\"")}
120
- for param in params.split(";"):
121
- try:
122
- key, value = param.split("=")
123
- except ValueError:
124
- break
125
- link[key.strip(replace_chars)] = value.strip(replace_chars)
126
- links.append(link)
127
- return links
128
-
129
-
130
- def _obfuscate_sensitive_headers(
131
- items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]],
132
- ) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]:
133
- for k, v in items:
134
- if to_str(k.lower()) in SENSITIVE_HEADERS:
135
- v = to_bytes_or_str("[secure]", match_type_of=v)
136
- yield k, v
137
-
138
-
139
- class Headers(typing.MutableMapping[str, str]):
140
- """
141
- HTTP headers, as a case-insensitive multi-dict.
142
- """
143
-
144
- def __init__(
145
- self,
146
- headers: HeaderTypes | None = None,
147
- encoding: str | None = None,
148
- ) -> None:
149
- self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]]
150
-
151
- if isinstance(headers, Headers):
152
- self._list = list(headers._list)
153
- elif isinstance(headers, Mapping):
154
- for k, v in headers.items():
155
- bytes_key = _normalize_header_key(k, encoding)
156
- bytes_value = _normalize_header_value(v, encoding)
157
- self._list.append((bytes_key, bytes_key.lower(), bytes_value))
158
- elif headers is not None:
159
- for k, v in headers:
160
- bytes_key = _normalize_header_key(k, encoding)
161
- bytes_value = _normalize_header_value(v, encoding)
162
- self._list.append((bytes_key, bytes_key.lower(), bytes_value))
163
-
164
- self._encoding = encoding
165
-
166
- @property
167
- def encoding(self) -> str:
168
- """
169
- Header encoding is mandated as ascii, but we allow fallbacks to utf-8
170
- or iso-8859-1.
171
- """
172
- if self._encoding is None:
173
- for encoding in ["ascii", "utf-8"]:
174
- for key, value in self.raw:
175
- try:
176
- key.decode(encoding)
177
- value.decode(encoding)
178
- except UnicodeDecodeError:
179
- break
180
- else:
181
- # The else block runs if 'break' did not occur, meaning
182
- # all values fitted the encoding.
183
- self._encoding = encoding
184
- break
185
- else:
186
- # The ISO-8859-1 encoding covers all 256 code points in a byte,
187
- # so will never raise decode errors.
188
- self._encoding = "iso-8859-1"
189
- return self._encoding
190
-
191
- @encoding.setter
192
- def encoding(self, value: str) -> None:
193
- self._encoding = value
194
-
195
- @property
196
- def raw(self) -> list[tuple[bytes, bytes]]:
197
- """
198
- Returns a list of the raw header items, as byte pairs.
199
- """
200
- return [(raw_key, value) for raw_key, _, value in self._list]
201
-
202
- def keys(self) -> typing.KeysView[str]:
203
- return {key.decode(self.encoding): None for _, key, value in self._list}.keys()
204
-
205
- def values(self) -> typing.ValuesView[str]:
206
- values_dict: dict[str, str] = {}
207
- for _, key, value in self._list:
208
- str_key = key.decode(self.encoding)
209
- str_value = value.decode(self.encoding)
210
- if str_key in values_dict:
211
- values_dict[str_key] += f", {str_value}"
212
- else:
213
- values_dict[str_key] = str_value
214
- return values_dict.values()
215
-
216
- def items(self) -> typing.ItemsView[str, str]:
217
- """
218
- Return `(key, value)` items of headers. Concatenate headers
219
- into a single comma separated value when a key occurs multiple times.
220
- """
221
- values_dict: dict[str, str] = {}
222
- for _, key, value in self._list:
223
- str_key = key.decode(self.encoding)
224
- str_value = value.decode(self.encoding)
225
- if str_key in values_dict:
226
- values_dict[str_key] += f", {str_value}"
227
- else:
228
- values_dict[str_key] = str_value
229
- return values_dict.items()
230
-
231
- def multi_items(self) -> list[tuple[str, str]]:
232
- """
233
- Return a list of `(key, value)` pairs of headers. Allow multiple
234
- occurrences of the same key without concatenating into a single
235
- comma separated value.
236
- """
237
- return [
238
- (key.decode(self.encoding), value.decode(self.encoding))
239
- for _, key, value in self._list
240
- ]
241
-
242
- def get(self, key: str, default: typing.Any = None) -> typing.Any:
243
- """
244
- Return a header value. If multiple occurrences of the header occur
245
- then concatenate them together with commas.
246
- """
247
- try:
248
- return self[key]
249
- except KeyError:
250
- return default
251
-
252
- def get_list(self, key: str, split_commas: bool = False) -> list[str]:
253
- """
254
- Return a list of all header values for a given key.
255
- If `split_commas=True` is passed, then any comma separated header
256
- values are split into multiple return strings.
257
- """
258
- get_header_key = key.lower().encode(self.encoding)
259
-
260
- values = [
261
- item_value.decode(self.encoding)
262
- for _, item_key, item_value in self._list
263
- if item_key.lower() == get_header_key
264
- ]
265
-
266
- if not split_commas:
267
- return values
268
-
269
- split_values = []
270
- for value in values:
271
- split_values.extend([item.strip() for item in value.split(",")])
272
- return split_values
273
-
274
- def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore
275
- headers = Headers(headers)
276
- for key in headers.keys():
277
- if key in self:
278
- self.pop(key)
279
- self._list.extend(headers._list)
280
-
281
- def copy(self) -> Headers:
282
- return Headers(self, encoding=self.encoding)
283
-
284
- def __getitem__(self, key: str) -> str:
285
- """
286
- Return a single header value.
287
-
288
- If there are multiple headers with the same key, then we concatenate
289
- them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2
290
- """
291
- normalized_key = key.lower().encode(self.encoding)
292
-
293
- items = [
294
- header_value.decode(self.encoding)
295
- for _, header_key, header_value in self._list
296
- if header_key == normalized_key
297
- ]
298
-
299
- if items:
300
- return ", ".join(items)
301
-
302
- raise KeyError(key)
303
-
304
- def __setitem__(self, key: str, value: str) -> None:
305
- """
306
- Set the header `key` to `value`, removing any duplicate entries.
307
- Retains insertion order.
308
- """
309
- set_key = key.encode(self._encoding or "utf-8")
310
- set_value = value.encode(self._encoding or "utf-8")
311
- lookup_key = set_key.lower()
312
-
313
- found_indexes = [
314
- idx
315
- for idx, (_, item_key, _) in enumerate(self._list)
316
- if item_key == lookup_key
317
- ]
318
-
319
- for idx in reversed(found_indexes[1:]):
320
- del self._list[idx]
321
-
322
- if found_indexes:
323
- idx = found_indexes[0]
324
- self._list[idx] = (set_key, lookup_key, set_value)
325
- else:
326
- self._list.append((set_key, lookup_key, set_value))
327
-
328
- def __delitem__(self, key: str) -> None:
329
- """
330
- Remove the header `key`.
331
- """
332
- del_key = key.lower().encode(self.encoding)
333
-
334
- pop_indexes = [
335
- idx
336
- for idx, (_, item_key, _) in enumerate(self._list)
337
- if item_key.lower() == del_key
338
- ]
339
-
340
- if not pop_indexes:
341
- raise KeyError(key)
342
-
343
- for idx in reversed(pop_indexes):
344
- del self._list[idx]
345
-
346
- def __contains__(self, key: typing.Any) -> bool:
347
- header_key = key.lower().encode(self.encoding)
348
- return header_key in [key for _, key, _ in self._list]
349
-
350
- def __iter__(self) -> typing.Iterator[typing.Any]:
351
- return iter(self.keys())
352
-
353
- def __len__(self) -> int:
354
- return len(self._list)
355
-
356
- def __eq__(self, other: typing.Any) -> bool:
357
- try:
358
- other_headers = Headers(other)
359
- except ValueError:
360
- return False
361
-
362
- self_list = [(key, value) for _, key, value in self._list]
363
- other_list = [(key, value) for _, key, value in other_headers._list]
364
- return sorted(self_list) == sorted(other_list)
365
-
366
- def __repr__(self) -> str:
367
- class_name = self.__class__.__name__
368
-
369
- encoding_str = ""
370
- if self.encoding != "ascii":
371
- encoding_str = f", encoding={self.encoding!r}"
372
-
373
- as_list = list(_obfuscate_sensitive_headers(self.multi_items()))
374
- as_dict = dict(as_list)
375
-
376
- no_duplicate_keys = len(as_dict) == len(as_list)
377
- if no_duplicate_keys:
378
- return f"{class_name}({as_dict!r}{encoding_str})"
379
- return f"{class_name}({as_list!r}{encoding_str})"
380
-
381
-
382
- class Request:
383
- def __init__(
384
- self,
385
- method: str,
386
- url: URL | str,
387
- *,
388
- params: QueryParamTypes | None = None,
389
- headers: HeaderTypes | None = None,
390
- cookies: CookieTypes | None = None,
391
- content: RequestContent | None = None,
392
- data: RequestData | None = None,
393
- files: RequestFiles | None = None,
394
- json: typing.Any | None = None,
395
- stream: SyncByteStream | AsyncByteStream | None = None,
396
- extensions: RequestExtensions | None = None,
397
- ) -> None:
398
- self.method = method.upper()
399
- self.url = URL(url) if params is None else URL(url, params=params)
400
- self.headers = Headers(headers)
401
- self.extensions = {} if extensions is None else dict(extensions)
402
-
403
- if cookies:
404
- Cookies(cookies).set_cookie_header(self)
405
-
406
- if stream is None:
407
- content_type: str | None = self.headers.get("content-type")
408
- headers, stream = encode_request(
409
- content=content,
410
- data=data,
411
- files=files,
412
- json=json,
413
- boundary=get_multipart_boundary_from_content_type(
414
- content_type=content_type.encode(self.headers.encoding)
415
- if content_type
416
- else None
417
- ),
418
- )
419
- self._prepare(headers)
420
- self.stream = stream
421
- # Load the request body, except for streaming content.
422
- if isinstance(stream, ByteStream):
423
- self.read()
424
- else:
425
- # There's an important distinction between `Request(content=...)`,
426
- # and `Request(stream=...)`.
427
- #
428
- # Using `content=...` implies automatically populated `Host` and content
429
- # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
430
- #
431
- # Using `stream=...` will not automatically include *any*
432
- # auto-populated headers.
433
- #
434
- # As an end-user you don't really need `stream=...`. It's only
435
- # useful when:
436
- #
437
- # * Preserving the request stream when copying requests, eg for redirects.
438
- # * Creating request instances on the *server-side* of the transport API.
439
- self.stream = stream
440
-
441
- def _prepare(self, default_headers: dict[str, str]) -> None:
442
- for key, value in default_headers.items():
443
- # Ignore Transfer-Encoding if the Content-Length has been set explicitly.
444
- if key.lower() == "transfer-encoding" and "Content-Length" in self.headers:
445
- continue
446
- self.headers.setdefault(key, value)
447
-
448
- auto_headers: list[tuple[bytes, bytes]] = []
449
-
450
- has_host = "Host" in self.headers
451
- has_content_length = (
452
- "Content-Length" in self.headers or "Transfer-Encoding" in self.headers
453
- )
454
-
455
- if not has_host and self.url.host:
456
- auto_headers.append((b"Host", self.url.netloc))
457
- if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
458
- auto_headers.append((b"Content-Length", b"0"))
459
-
460
- self.headers = Headers(auto_headers + self.headers.raw)
461
-
462
- @property
463
- def content(self) -> bytes:
464
- if not hasattr(self, "_content"):
465
- raise RequestNotRead()
466
- return self._content
467
-
468
- def read(self) -> bytes:
469
- """
470
- Read and return the request content.
471
- """
472
- if not hasattr(self, "_content"):
473
- assert isinstance(self.stream, typing.Iterable)
474
- self._content = b"".join(self.stream)
475
- if not isinstance(self.stream, ByteStream):
476
- # If a streaming request has been read entirely into memory, then
477
- # we can replace the stream with a raw bytes implementation,
478
- # to ensure that any non-replayable streams can still be used.
479
- self.stream = ByteStream(self._content)
480
- return self._content
481
-
482
- async def aread(self) -> bytes:
483
- """
484
- Read and return the request content.
485
- """
486
- if not hasattr(self, "_content"):
487
- assert isinstance(self.stream, typing.AsyncIterable)
488
- self._content = b"".join([part async for part in self.stream])
489
- if not isinstance(self.stream, ByteStream):
490
- # If a streaming request has been read entirely into memory, then
491
- # we can replace the stream with a raw bytes implementation,
492
- # to ensure that any non-replayable streams can still be used.
493
- self.stream = ByteStream(self._content)
494
- return self._content
495
-
496
- def __repr__(self) -> str:
497
- class_name = self.__class__.__name__
498
- url = str(self.url)
499
- return f"<{class_name}({self.method!r}, {url!r})>"
500
-
501
- def __getstate__(self) -> dict[str, typing.Any]:
502
- return {
503
- name: value
504
- for name, value in self.__dict__.items()
505
- if name not in ["extensions", "stream"]
506
- }
507
-
508
- def __setstate__(self, state: dict[str, typing.Any]) -> None:
509
- for name, value in state.items():
510
- setattr(self, name, value)
511
- self.extensions = {}
512
- self.stream = UnattachedStream()
513
-
514
-
515
- class Response:
516
- def __init__(
517
- self,
518
- status_code: int,
519
- *,
520
- headers: HeaderTypes | None = None,
521
- content: ResponseContent | None = None,
522
- text: str | None = None,
523
- html: str | None = None,
524
- json: typing.Any = None,
525
- stream: SyncByteStream | AsyncByteStream | None = None,
526
- request: Request | None = None,
527
- extensions: ResponseExtensions | None = None,
528
- history: list[Response] | None = None,
529
- default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
530
- ) -> None:
531
- self.status_code = status_code
532
- self.headers = Headers(headers)
533
-
534
- self._request: Request | None = request
535
-
536
- # When follow_redirects=False and a redirect is received,
537
- # the client will set `response.next_request`.
538
- self.next_request: Request | None = None
539
-
540
- self.extensions = {} if extensions is None else dict(extensions)
541
- self.history = [] if history is None else list(history)
542
-
543
- self.is_closed = False
544
- self.is_stream_consumed = False
545
-
546
- self.default_encoding = default_encoding
547
-
548
- if stream is None:
549
- headers, stream = encode_response(content, text, html, json)
550
- self._prepare(headers)
551
- self.stream = stream
552
- if isinstance(stream, ByteStream):
553
- # Load the response body, except for streaming content.
554
- self.read()
555
- else:
556
- # There's an important distinction between `Response(content=...)`,
557
- # and `Response(stream=...)`.
558
- #
559
- # Using `content=...` implies automatically populated content headers,
560
- # of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
561
- #
562
- # Using `stream=...` will not automatically include any content headers.
563
- #
564
- # As an end-user you don't really need `stream=...`. It's only
565
- # useful when creating response instances having received a stream
566
- # from the transport API.
567
- self.stream = stream
568
-
569
- self._num_bytes_downloaded = 0
570
-
571
- def _prepare(self, default_headers: dict[str, str]) -> None:
572
- for key, value in default_headers.items():
573
- # Ignore Transfer-Encoding if the Content-Length has been set explicitly.
574
- if key.lower() == "transfer-encoding" and "content-length" in self.headers:
575
- continue
576
- self.headers.setdefault(key, value)
577
-
578
- @property
579
- def elapsed(self) -> datetime.timedelta:
580
- """
581
- Returns the time taken for the complete request/response
582
- cycle to complete.
583
- """
584
- if not hasattr(self, "_elapsed"):
585
- raise RuntimeError(
586
- "'.elapsed' may only be accessed after the response "
587
- "has been read or closed."
588
- )
589
- return self._elapsed
590
-
591
- @elapsed.setter
592
- def elapsed(self, elapsed: datetime.timedelta) -> None:
593
- self._elapsed = elapsed
594
-
595
- @property
596
- def request(self) -> Request:
597
- """
598
- Returns the request instance associated with the current response.
599
- """
600
- if self._request is None:
601
- raise RuntimeError(
602
- "The request instance has not been set on this response."
603
- )
604
- return self._request
605
-
606
- @request.setter
607
- def request(self, value: Request) -> None:
608
- self._request = value
609
-
610
- @property
611
- def http_version(self) -> str:
612
- try:
613
- http_version: bytes = self.extensions["http_version"]
614
- except KeyError:
615
- return "HTTP/1.1"
616
- else:
617
- return http_version.decode("ascii", errors="ignore")
618
-
619
- @property
620
- def reason_phrase(self) -> str:
621
- try:
622
- reason_phrase: bytes = self.extensions["reason_phrase"]
623
- except KeyError:
624
- return codes.get_reason_phrase(self.status_code)
625
- else:
626
- return reason_phrase.decode("ascii", errors="ignore")
627
-
628
- @property
629
- def url(self) -> URL:
630
- """
631
- Returns the URL for which the request was made.
632
- """
633
- return self.request.url
634
-
635
- @property
636
- def content(self) -> bytes:
637
- if not hasattr(self, "_content"):
638
- raise ResponseNotRead()
639
- return self._content
640
-
641
- @property
642
- def text(self) -> str:
643
- if not hasattr(self, "_text"):
644
- content = self.content
645
- if not content:
646
- self._text = ""
647
- else:
648
- decoder = TextDecoder(encoding=self.encoding or "utf-8")
649
- self._text = "".join([decoder.decode(self.content), decoder.flush()])
650
- return self._text
651
-
652
- @property
653
- def encoding(self) -> str | None:
654
- """
655
- Return an encoding to use for decoding the byte content into text.
656
- The priority for determining this is given by...
657
-
658
- * `.encoding = <>` has been set explicitly.
659
- * The encoding as specified by the charset parameter in the Content-Type header.
660
- * The encoding as determined by `default_encoding`, which may either be
661
- a string like "utf-8" indicating the encoding to use, or may be a callable
662
- which enables charset autodetection.
663
- """
664
- if not hasattr(self, "_encoding"):
665
- encoding = self.charset_encoding
666
- if encoding is None or not _is_known_encoding(encoding):
667
- if isinstance(self.default_encoding, str):
668
- encoding = self.default_encoding
669
- elif hasattr(self, "_content"):
670
- encoding = self.default_encoding(self._content)
671
- self._encoding = encoding or "utf-8"
672
- return self._encoding
673
-
674
- @encoding.setter
675
- def encoding(self, value: str) -> None:
676
- """
677
- Set the encoding to use for decoding the byte content into text.
678
-
679
- If the `text` attribute has been accessed, attempting to set the
680
- encoding will throw a ValueError.
681
- """
682
- if hasattr(self, "_text"):
683
- raise ValueError(
684
- "Setting encoding after `text` has been accessed is not allowed."
685
- )
686
- self._encoding = value
687
-
688
- @property
689
- def charset_encoding(self) -> str | None:
690
- """
691
- Return the encoding, as specified by the Content-Type header.
692
- """
693
- content_type = self.headers.get("Content-Type")
694
- if content_type is None:
695
- return None
696
-
697
- return _parse_content_type_charset(content_type)
698
-
699
- def _get_content_decoder(self) -> ContentDecoder:
700
- """
701
- Returns a decoder instance which can be used to decode the raw byte
702
- content, depending on the Content-Encoding used in the response.
703
- """
704
- if not hasattr(self, "_decoder"):
705
- decoders: list[ContentDecoder] = []
706
- values = self.headers.get_list("content-encoding", split_commas=True)
707
- for value in values:
708
- value = value.strip().lower()
709
- try:
710
- decoder_cls = SUPPORTED_DECODERS[value]
711
- decoders.append(decoder_cls())
712
- except KeyError:
713
- continue
714
-
715
- if len(decoders) == 1:
716
- self._decoder = decoders[0]
717
- elif len(decoders) > 1:
718
- self._decoder = MultiDecoder(children=decoders)
719
- else:
720
- self._decoder = IdentityDecoder()
721
-
722
- return self._decoder
723
-
724
- @property
725
- def is_informational(self) -> bool:
726
- """
727
- A property which is `True` for 1xx status codes, `False` otherwise.
728
- """
729
- return codes.is_informational(self.status_code)
730
-
731
- @property
732
- def is_success(self) -> bool:
733
- """
734
- A property which is `True` for 2xx status codes, `False` otherwise.
735
- """
736
- return codes.is_success(self.status_code)
737
-
738
- @property
739
- def is_redirect(self) -> bool:
740
- """
741
- A property which is `True` for 3xx status codes, `False` otherwise.
742
-
743
- Note that not all responses with a 3xx status code indicate a URL redirect.
744
-
745
- Use `response.has_redirect_location` to determine responses with a properly
746
- formed URL redirection.
747
- """
748
- return codes.is_redirect(self.status_code)
749
-
750
- @property
751
- def is_client_error(self) -> bool:
752
- """
753
- A property which is `True` for 4xx status codes, `False` otherwise.
754
- """
755
- return codes.is_client_error(self.status_code)
756
-
757
- @property
758
- def is_server_error(self) -> bool:
759
- """
760
- A property which is `True` for 5xx status codes, `False` otherwise.
761
- """
762
- return codes.is_server_error(self.status_code)
763
-
764
- @property
765
- def is_error(self) -> bool:
766
- """
767
- A property which is `True` for 4xx and 5xx status codes, `False` otherwise.
768
- """
769
- return codes.is_error(self.status_code)
770
-
771
- @property
772
- def has_redirect_location(self) -> bool:
773
- """
774
- Returns True for 3xx responses with a properly formed URL redirection,
775
- `False` otherwise.
776
- """
777
- return (
778
- self.status_code
779
- in (
780
- # 301 (Cacheable redirect. Method may change to GET.)
781
- codes.MOVED_PERMANENTLY,
782
- # 302 (Uncacheable redirect. Method may change to GET.)
783
- codes.FOUND,
784
- # 303 (Client should make a GET or HEAD request.)
785
- codes.SEE_OTHER,
786
- # 307 (Equiv. 302, but retain method)
787
- codes.TEMPORARY_REDIRECT,
788
- # 308 (Equiv. 301, but retain method)
789
- codes.PERMANENT_REDIRECT,
790
- )
791
- and "Location" in self.headers
792
- )
793
-
794
- def raise_for_status(self) -> Response:
795
- """
796
- Raise the `HTTPStatusError` if one occurred.
797
- """
798
- request = self._request
799
- if request is None:
800
- raise RuntimeError(
801
- "Cannot call `raise_for_status` as the request "
802
- "instance has not been set on this response."
803
- )
804
-
805
- if self.is_success:
806
- return self
807
-
808
- if self.has_redirect_location:
809
- message = (
810
- "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
811
- "Redirect location: '{0.headers[location]}'\n"
812
- "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
813
- )
814
- else:
815
- message = (
816
- "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
817
- "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
818
- )
819
-
820
- status_class = self.status_code // 100
821
- error_types = {
822
- 1: "Informational response",
823
- 3: "Redirect response",
824
- 4: "Client error",
825
- 5: "Server error",
826
- }
827
- error_type = error_types.get(status_class, "Invalid status code")
828
- message = message.format(self, error_type=error_type)
829
- raise HTTPStatusError(message, request=request, response=self)
830
-
831
- def json(self, **kwargs: typing.Any) -> typing.Any:
832
- return jsonlib.loads(self.content, **kwargs)
833
-
834
- @property
835
- def cookies(self) -> Cookies:
836
- if not hasattr(self, "_cookies"):
837
- self._cookies = Cookies()
838
- self._cookies.extract_cookies(self)
839
- return self._cookies
840
-
841
- @property
842
- def links(self) -> dict[str | None, dict[str, str]]:
843
- """
844
- Returns the parsed header links of the response, if any
845
- """
846
- header = self.headers.get("link")
847
- if header is None:
848
- return {}
849
-
850
- return {
851
- (link.get("rel") or link.get("url")): link
852
- for link in _parse_header_links(header)
853
- }
854
-
855
- @property
856
- def num_bytes_downloaded(self) -> int:
857
- return self._num_bytes_downloaded
858
-
859
- def __repr__(self) -> str:
860
- return f"<Response [{self.status_code} {self.reason_phrase}]>"
861
-
862
- def __getstate__(self) -> dict[str, typing.Any]:
863
- return {
864
- name: value
865
- for name, value in self.__dict__.items()
866
- if name not in ["extensions", "stream", "is_closed", "_decoder"]
867
- }
868
-
869
- def __setstate__(self, state: dict[str, typing.Any]) -> None:
870
- for name, value in state.items():
871
- setattr(self, name, value)
872
- self.is_closed = True
873
- self.extensions = {}
874
- self.stream = UnattachedStream()
875
-
876
- def read(self) -> bytes:
877
- """
878
- Read and return the response content.
879
- """
880
- if not hasattr(self, "_content"):
881
- self._content = b"".join(self.iter_bytes())
882
- return self._content
883
-
884
- def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
885
- """
886
- A byte-iterator over the decoded response content.
887
- This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
888
- """
889
- if hasattr(self, "_content"):
890
- chunk_size = len(self._content) if chunk_size is None else chunk_size
891
- for i in range(0, len(self._content), max(chunk_size, 1)):
892
- yield self._content[i : i + chunk_size]
893
- else:
894
- decoder = self._get_content_decoder()
895
- chunker = ByteChunker(chunk_size=chunk_size)
896
- with request_context(request=self._request):
897
- for raw_bytes in self.iter_raw():
898
- decoded = decoder.decode(raw_bytes)
899
- for chunk in chunker.decode(decoded):
900
- yield chunk
901
- decoded = decoder.flush()
902
- for chunk in chunker.decode(decoded):
903
- yield chunk # pragma: no cover
904
- for chunk in chunker.flush():
905
- yield chunk
906
-
907
- def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]:
908
- """
909
- A str-iterator over the decoded response content
910
- that handles gzip, deflate, etc., and also detects the content's
911
- string encoding.
912
- """
913
- decoder = TextDecoder(encoding=self.encoding or "utf-8")
914
- chunker = TextChunker(chunk_size=chunk_size)
915
- with request_context(request=self._request):
916
- for byte_content in self.iter_bytes():
917
- text_content = decoder.decode(byte_content)
918
- for chunk in chunker.decode(text_content):
919
- yield chunk
920
- text_content = decoder.flush()
921
- for chunk in chunker.decode(text_content):
922
- yield chunk # pragma: no cover
923
- for chunk in chunker.flush():
924
- yield chunk
925
-
926
- def iter_lines(self) -> typing.Iterator[str]:
927
- decoder = LineDecoder()
928
- with request_context(request=self._request):
929
- for text in self.iter_text():
930
- for line in decoder.decode(text):
931
- yield line
932
- for line in decoder.flush():
933
- yield line
934
-
935
- def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
936
- """
937
- A byte-iterator over the raw response content.
938
- """
939
- if self.is_stream_consumed:
940
- raise StreamConsumed()
941
- if self.is_closed:
942
- raise StreamClosed()
943
- if not isinstance(self.stream, SyncByteStream):
944
- raise RuntimeError("Attempted to call a sync iterator on an async stream.")
945
-
946
- self.is_stream_consumed = True
947
- self._num_bytes_downloaded = 0
948
- chunker = ByteChunker(chunk_size=chunk_size)
949
-
950
- with request_context(request=self._request):
951
- for raw_stream_bytes in self.stream:
952
- self._num_bytes_downloaded += len(raw_stream_bytes)
953
- for chunk in chunker.decode(raw_stream_bytes):
954
- yield chunk
955
-
956
- for chunk in chunker.flush():
957
- yield chunk
958
-
959
- self.close()
960
-
961
- def close(self) -> None:
962
- """
963
- Close the response and release the connection.
964
- Automatically called if the response body is read to completion.
965
- """
966
- if not isinstance(self.stream, SyncByteStream):
967
- raise RuntimeError("Attempted to call an sync close on an async stream.")
968
-
969
- if not self.is_closed:
970
- self.is_closed = True
971
- with request_context(request=self._request):
972
- self.stream.close()
973
-
974
- async def aread(self) -> bytes:
975
- """
976
- Read and return the response content.
977
- """
978
- if not hasattr(self, "_content"):
979
- self._content = b"".join([part async for part in self.aiter_bytes()])
980
- return self._content
981
-
982
- async def aiter_bytes(
983
- self, chunk_size: int | None = None
984
- ) -> typing.AsyncIterator[bytes]:
985
- """
986
- A byte-iterator over the decoded response content.
987
- This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
988
- """
989
- if hasattr(self, "_content"):
990
- chunk_size = len(self._content) if chunk_size is None else chunk_size
991
- for i in range(0, len(self._content), max(chunk_size, 1)):
992
- yield self._content[i : i + chunk_size]
993
- else:
994
- decoder = self._get_content_decoder()
995
- chunker = ByteChunker(chunk_size=chunk_size)
996
- with request_context(request=self._request):
997
- async for raw_bytes in self.aiter_raw():
998
- decoded = decoder.decode(raw_bytes)
999
- for chunk in chunker.decode(decoded):
1000
- yield chunk
1001
- decoded = decoder.flush()
1002
- for chunk in chunker.decode(decoded):
1003
- yield chunk # pragma: no cover
1004
- for chunk in chunker.flush():
1005
- yield chunk
1006
-
1007
- async def aiter_text(
1008
- self, chunk_size: int | None = None
1009
- ) -> typing.AsyncIterator[str]:
1010
- """
1011
- A str-iterator over the decoded response content
1012
- that handles gzip, deflate, etc., and also detects the content's
1013
- string encoding.
1014
- """
1015
- decoder = TextDecoder(encoding=self.encoding or "utf-8")
1016
- chunker = TextChunker(chunk_size=chunk_size)
1017
- with request_context(request=self._request):
1018
- async for byte_content in self.aiter_bytes():
1019
- text_content = decoder.decode(byte_content)
1020
- for chunk in chunker.decode(text_content):
1021
- yield chunk
1022
- text_content = decoder.flush()
1023
- for chunk in chunker.decode(text_content):
1024
- yield chunk # pragma: no cover
1025
- for chunk in chunker.flush():
1026
- yield chunk
1027
-
1028
- async def aiter_lines(self) -> typing.AsyncIterator[str]:
1029
- decoder = LineDecoder()
1030
- with request_context(request=self._request):
1031
- async for text in self.aiter_text():
1032
- for line in decoder.decode(text):
1033
- yield line
1034
- for line in decoder.flush():
1035
- yield line
1036
-
1037
- async def aiter_raw(
1038
- self, chunk_size: int | None = None
1039
- ) -> typing.AsyncIterator[bytes]:
1040
- """
1041
- A byte-iterator over the raw response content.
1042
- """
1043
- if self.is_stream_consumed:
1044
- raise StreamConsumed()
1045
- if self.is_closed:
1046
- raise StreamClosed()
1047
- if not isinstance(self.stream, AsyncByteStream):
1048
- raise RuntimeError("Attempted to call an async iterator on an sync stream.")
1049
-
1050
- self.is_stream_consumed = True
1051
- self._num_bytes_downloaded = 0
1052
- chunker = ByteChunker(chunk_size=chunk_size)
1053
-
1054
- with request_context(request=self._request):
1055
- async for raw_stream_bytes in self.stream:
1056
- self._num_bytes_downloaded += len(raw_stream_bytes)
1057
- for chunk in chunker.decode(raw_stream_bytes):
1058
- yield chunk
1059
-
1060
- for chunk in chunker.flush():
1061
- yield chunk
1062
-
1063
- await self.aclose()
1064
-
1065
- async def aclose(self) -> None:
1066
- """
1067
- Close the response and release the connection.
1068
- Automatically called if the response body is read to completion.
1069
- """
1070
- if not isinstance(self.stream, AsyncByteStream):
1071
- raise RuntimeError("Attempted to call an async close on an sync stream.")
1072
-
1073
- if not self.is_closed:
1074
- self.is_closed = True
1075
- with request_context(request=self._request):
1076
- await self.stream.aclose()
1077
-
1078
-
1079
- class Cookies(typing.MutableMapping[str, str]):
1080
- """
1081
- HTTP Cookies, as a mutable mapping.
1082
- """
1083
-
1084
- def __init__(self, cookies: CookieTypes | None = None) -> None:
1085
- if cookies is None or isinstance(cookies, dict):
1086
- self.jar = CookieJar()
1087
- if isinstance(cookies, dict):
1088
- for key, value in cookies.items():
1089
- self.set(key, value)
1090
- elif isinstance(cookies, list):
1091
- self.jar = CookieJar()
1092
- for key, value in cookies:
1093
- self.set(key, value)
1094
- elif isinstance(cookies, Cookies):
1095
- self.jar = CookieJar()
1096
- for cookie in cookies.jar:
1097
- self.jar.set_cookie(cookie)
1098
- else:
1099
- self.jar = cookies
1100
-
1101
- def extract_cookies(self, response: Response) -> None:
1102
- """
1103
- Loads any cookies based on the response `Set-Cookie` headers.
1104
- """
1105
- urllib_response = self._CookieCompatResponse(response)
1106
- urllib_request = self._CookieCompatRequest(response.request)
1107
-
1108
- self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore
1109
-
1110
- def set_cookie_header(self, request: Request) -> None:
1111
- """
1112
- Sets an appropriate 'Cookie:' HTTP header on the `Request`.
1113
- """
1114
- urllib_request = self._CookieCompatRequest(request)
1115
- self.jar.add_cookie_header(urllib_request)
1116
-
1117
- def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
1118
- """
1119
- Set a cookie value by name. May optionally include domain and path.
1120
- """
1121
- kwargs = {
1122
- "version": 0,
1123
- "name": name,
1124
- "value": value,
1125
- "port": None,
1126
- "port_specified": False,
1127
- "domain": domain,
1128
- "domain_specified": bool(domain),
1129
- "domain_initial_dot": domain.startswith("."),
1130
- "path": path,
1131
- "path_specified": bool(path),
1132
- "secure": False,
1133
- "expires": None,
1134
- "discard": True,
1135
- "comment": None,
1136
- "comment_url": None,
1137
- "rest": {"HttpOnly": None},
1138
- "rfc2109": False,
1139
- }
1140
- cookie = Cookie(**kwargs) # type: ignore
1141
- self.jar.set_cookie(cookie)
1142
-
1143
- def get( # type: ignore
1144
- self,
1145
- name: str,
1146
- default: str | None = None,
1147
- domain: str | None = None,
1148
- path: str | None = None,
1149
- ) -> str | None:
1150
- """
1151
- Get a cookie by name. May optionally include domain and path
1152
- in order to specify exactly which cookie to retrieve.
1153
- """
1154
- value = None
1155
- for cookie in self.jar:
1156
- if cookie.name == name:
1157
- if domain is None or cookie.domain == domain:
1158
- if path is None or cookie.path == path:
1159
- if value is not None:
1160
- message = f"Multiple cookies exist with name={name}"
1161
- raise CookieConflict(message)
1162
- value = cookie.value
1163
-
1164
- if value is None:
1165
- return default
1166
- return value
1167
-
1168
- def delete(
1169
- self,
1170
- name: str,
1171
- domain: str | None = None,
1172
- path: str | None = None,
1173
- ) -> None:
1174
- """
1175
- Delete a cookie by name. May optionally include domain and path
1176
- in order to specify exactly which cookie to delete.
1177
- """
1178
- if domain is not None and path is not None:
1179
- return self.jar.clear(domain, path, name)
1180
-
1181
- remove = [
1182
- cookie
1183
- for cookie in self.jar
1184
- if cookie.name == name
1185
- and (domain is None or cookie.domain == domain)
1186
- and (path is None or cookie.path == path)
1187
- ]
1188
-
1189
- for cookie in remove:
1190
- self.jar.clear(cookie.domain, cookie.path, cookie.name)
1191
-
1192
- def clear(self, domain: str | None = None, path: str | None = None) -> None:
1193
- """
1194
- Delete all cookies. Optionally include a domain and path in
1195
- order to only delete a subset of all the cookies.
1196
- """
1197
- args = []
1198
- if domain is not None:
1199
- args.append(domain)
1200
- if path is not None:
1201
- assert domain is not None
1202
- args.append(path)
1203
- self.jar.clear(*args)
1204
-
1205
- def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore
1206
- cookies = Cookies(cookies)
1207
- for cookie in cookies.jar:
1208
- self.jar.set_cookie(cookie)
1209
-
1210
- def __setitem__(self, name: str, value: str) -> None:
1211
- return self.set(name, value)
1212
-
1213
- def __getitem__(self, name: str) -> str:
1214
- value = self.get(name)
1215
- if value is None:
1216
- raise KeyError(name)
1217
- return value
1218
-
1219
- def __delitem__(self, name: str) -> None:
1220
- return self.delete(name)
1221
-
1222
- def __len__(self) -> int:
1223
- return len(self.jar)
1224
-
1225
- def __iter__(self) -> typing.Iterator[str]:
1226
- return (cookie.name for cookie in self.jar)
1227
-
1228
- def __bool__(self) -> bool:
1229
- for _ in self.jar:
1230
- return True
1231
- return False
1232
-
1233
- def __repr__(self) -> str:
1234
- cookies_repr = ", ".join(
1235
- [
1236
- f"<Cookie {cookie.name}={cookie.value} for {cookie.domain} />"
1237
- for cookie in self.jar
1238
- ]
1239
- )
1240
-
1241
- return f"<Cookies[{cookies_repr}]>"
1242
-
1243
- class _CookieCompatRequest(urllib.request.Request):
1244
- """
1245
- Wraps a `Request` instance up in a compatibility interface suitable
1246
- for use with `CookieJar` operations.
1247
- """
1248
-
1249
- def __init__(self, request: Request) -> None:
1250
- super().__init__(
1251
- url=str(request.url),
1252
- headers=dict(request.headers),
1253
- method=request.method,
1254
- )
1255
- self.request = request
1256
-
1257
- def add_unredirected_header(self, key: str, value: str) -> None:
1258
- super().add_unredirected_header(key, value)
1259
- self.request.headers[key] = value
1260
-
1261
- class _CookieCompatResponse:
1262
- """
1263
- Wraps a `Response` instance up in a compatibility interface suitable
1264
- for use with `CookieJar` operations.
1265
- """
1266
-
1267
- def __init__(self, response: Response) -> None:
1268
- self.response = response
1269
-
1270
- def info(self) -> email.message.Message:
1271
- info = email.message.Message()
1272
- for key, value in self.response.headers.multi_items():
1273
- # Note that setting `info[key]` here is an "append" operation,
1274
- # not a "replace" operation.
1275
- # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__
1276
- info[key] = value
1277
- return info
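
The `_models.py` module deleted above backs the public `httpx.Headers`, `httpx.Request`, `httpx.Response`, and `httpx.Cookies` objects. As a minimal, illustrative sketch of how that API is normally exercised (the URL and payload are placeholders, not taken from this repository):

```python
import httpx

# Case-insensitive, multi-valued header container (the Headers class above).
headers = httpx.Headers({"Content-Type": "application/json"})
assert headers.get("content-type") == "application/json"

# Build a request/response pair without touching the network.
request = httpx.Request("GET", "https://example.org/items")
response = httpx.Response(200, request=request, json={"items": [1, 2, 3]})

assert response.is_success               # True for 2xx status codes
response.raise_for_status()              # returns the response for 2xx codes
print(response.json()["items"])          # [1, 2, 3]
print(response.headers["content-type"])  # application/json
```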
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_multipart.py DELETED
@@ -1,300 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import io
4
- import mimetypes
5
- import os
6
- import re
7
- import typing
8
- from pathlib import Path
9
-
10
- from ._types import (
11
- AsyncByteStream,
12
- FileContent,
13
- FileTypes,
14
- RequestData,
15
- RequestFiles,
16
- SyncByteStream,
17
- )
18
- from ._utils import (
19
- peek_filelike_length,
20
- primitive_value_to_str,
21
- to_bytes,
22
- )
23
-
24
- _HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"}
25
- _HTML5_FORM_ENCODING_REPLACEMENTS.update(
26
- {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B}
27
- )
28
- _HTML5_FORM_ENCODING_RE = re.compile(
29
- r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()])
30
- )
31
-
32
-
33
- def _format_form_param(name: str, value: str) -> bytes:
34
- """
35
- Encode a name/value pair within a multipart form.
36
- """
37
-
38
- def replacer(match: typing.Match[str]) -> str:
39
- return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)]
40
-
41
- value = _HTML5_FORM_ENCODING_RE.sub(replacer, value)
42
- return f'{name}="{value}"'.encode()
43
-
44
-
45
- def _guess_content_type(filename: str | None) -> str | None:
46
- """
47
- Guesses the mimetype based on a filename. Defaults to `application/octet-stream`.
48
-
49
- Returns `None` if `filename` is `None` or empty.
50
- """
51
- if filename:
52
- return mimetypes.guess_type(filename)[0] or "application/octet-stream"
53
- return None
54
-
55
-
56
- def get_multipart_boundary_from_content_type(
57
- content_type: bytes | None,
58
- ) -> bytes | None:
59
- if not content_type or not content_type.startswith(b"multipart/form-data"):
60
- return None
61
- # parse boundary according to
62
- # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1
63
- if b";" in content_type:
64
- for section in content_type.split(b";"):
65
- if section.strip().lower().startswith(b"boundary="):
66
- return section.strip()[len(b"boundary=") :].strip(b'"')
67
- return None
68
-
69
-
70
- class DataField:
71
- """
72
- A single form field item, within a multipart form field.
73
- """
74
-
75
- def __init__(self, name: str, value: str | bytes | int | float | None) -> None:
76
- if not isinstance(name, str):
77
- raise TypeError(
78
- f"Invalid type for name. Expected str, got {type(name)}: {name!r}"
79
- )
80
- if value is not None and not isinstance(value, (str, bytes, int, float)):
81
- raise TypeError(
82
- "Invalid type for value. Expected primitive type,"
83
- f" got {type(value)}: {value!r}"
84
- )
85
- self.name = name
86
- self.value: str | bytes = (
87
- value if isinstance(value, bytes) else primitive_value_to_str(value)
88
- )
89
-
90
- def render_headers(self) -> bytes:
91
- if not hasattr(self, "_headers"):
92
- name = _format_form_param("name", self.name)
93
- self._headers = b"".join(
94
- [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"]
95
- )
96
-
97
- return self._headers
98
-
99
- def render_data(self) -> bytes:
100
- if not hasattr(self, "_data"):
101
- self._data = to_bytes(self.value)
102
-
103
- return self._data
104
-
105
- def get_length(self) -> int:
106
- headers = self.render_headers()
107
- data = self.render_data()
108
- return len(headers) + len(data)
109
-
110
- def render(self) -> typing.Iterator[bytes]:
111
- yield self.render_headers()
112
- yield self.render_data()
113
-
114
-
115
- class FileField:
116
- """
117
- A single file field item, within a multipart form field.
118
- """
119
-
120
- CHUNK_SIZE = 64 * 1024
121
-
122
- def __init__(self, name: str, value: FileTypes) -> None:
123
- self.name = name
124
-
125
- fileobj: FileContent
126
-
127
- headers: dict[str, str] = {}
128
- content_type: str | None = None
129
-
130
- # This large tuple-based API largely mirrors requests' API.
131
- # It would be good to think of better APIs for this that we could
132
- # include in httpx 2.0, since variable-length tuples (especially of 4 elements)
133
- # are quite unwieldy.
134
- if isinstance(value, tuple):
135
- if len(value) == 2:
136
- # neither the 3rd parameter (content_type) nor the 4th (headers)
137
- # was included
138
- filename, fileobj = value
139
- elif len(value) == 3:
140
- filename, fileobj, content_type = value
141
- else:
142
- # all 4 parameters included
143
- filename, fileobj, content_type, headers = value # type: ignore
144
- else:
145
- filename = Path(str(getattr(value, "name", "upload"))).name
146
- fileobj = value
147
-
148
- if content_type is None:
149
- content_type = _guess_content_type(filename)
150
-
151
- has_content_type_header = any("content-type" in key.lower() for key in headers)
152
- if content_type is not None and not has_content_type_header:
153
- # Note that unlike requests, we ignore the content_type provided in the 3rd
154
- # tuple element if it is also included in the headers. requests does
155
- # the opposite (it overwrites the header with the 3rd tuple element).
156
- headers["Content-Type"] = content_type
157
-
158
- if isinstance(fileobj, io.StringIO):
159
- raise TypeError(
160
- "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'."
161
- )
162
- if isinstance(fileobj, io.TextIOBase):
163
- raise TypeError(
164
- "Multipart file uploads must be opened in binary mode, not text mode."
165
- )
166
-
167
- self.filename = filename
168
- self.file = fileobj
169
- self.headers = headers
170
-
171
- def get_length(self) -> int | None:
172
- headers = self.render_headers()
173
-
174
- if isinstance(self.file, (str, bytes)):
175
- return len(headers) + len(to_bytes(self.file))
176
-
177
- file_length = peek_filelike_length(self.file)
178
-
179
- # If we can't determine the filesize without reading it into memory,
180
- # then return `None` here, to indicate an unknown file length.
181
- if file_length is None:
182
- return None
183
-
184
- return len(headers) + file_length
185
-
186
- def render_headers(self) -> bytes:
187
- if not hasattr(self, "_headers"):
188
- parts = [
189
- b"Content-Disposition: form-data; ",
190
- _format_form_param("name", self.name),
191
- ]
192
- if self.filename:
193
- filename = _format_form_param("filename", self.filename)
194
- parts.extend([b"; ", filename])
195
- for header_name, header_value in self.headers.items():
196
- key, val = f"\r\n{header_name}: ".encode(), header_value.encode()
197
- parts.extend([key, val])
198
- parts.append(b"\r\n\r\n")
199
- self._headers = b"".join(parts)
200
-
201
- return self._headers
202
-
203
- def render_data(self) -> typing.Iterator[bytes]:
204
- if isinstance(self.file, (str, bytes)):
205
- yield to_bytes(self.file)
206
- return
207
-
208
- if hasattr(self.file, "seek"):
209
- try:
210
- self.file.seek(0)
211
- except io.UnsupportedOperation:
212
- pass
213
-
214
- chunk = self.file.read(self.CHUNK_SIZE)
215
- while chunk:
216
- yield to_bytes(chunk)
217
- chunk = self.file.read(self.CHUNK_SIZE)
218
-
219
- def render(self) -> typing.Iterator[bytes]:
220
- yield self.render_headers()
221
- yield from self.render_data()
222
-
223
-
224
- class MultipartStream(SyncByteStream, AsyncByteStream):
225
- """
226
- Request content as streaming multipart encoded form data.
227
- """
228
-
229
- def __init__(
230
- self,
231
- data: RequestData,
232
- files: RequestFiles,
233
- boundary: bytes | None = None,
234
- ) -> None:
235
- if boundary is None:
236
- boundary = os.urandom(16).hex().encode("ascii")
237
-
238
- self.boundary = boundary
239
- self.content_type = "multipart/form-data; boundary=%s" % boundary.decode(
240
- "ascii"
241
- )
242
- self.fields = list(self._iter_fields(data, files))
243
-
244
- def _iter_fields(
245
- self, data: RequestData, files: RequestFiles
246
- ) -> typing.Iterator[FileField | DataField]:
247
- for name, value in data.items():
248
- if isinstance(value, (tuple, list)):
249
- for item in value:
250
- yield DataField(name=name, value=item)
251
- else:
252
- yield DataField(name=name, value=value)
253
-
254
- file_items = files.items() if isinstance(files, typing.Mapping) else files
255
- for name, value in file_items:
256
- yield FileField(name=name, value=value)
257
-
258
- def iter_chunks(self) -> typing.Iterator[bytes]:
259
- for field in self.fields:
260
- yield b"--%s\r\n" % self.boundary
261
- yield from field.render()
262
- yield b"\r\n"
263
- yield b"--%s--\r\n" % self.boundary
264
-
265
- def get_content_length(self) -> int | None:
266
- """
267
- Return the length of the multipart encoded content, or `None` if
268
- any of the files have a length that cannot be determined upfront.
269
- """
270
- boundary_length = len(self.boundary)
271
- length = 0
272
-
273
- for field in self.fields:
274
- field_length = field.get_length()
275
- if field_length is None:
276
- return None
277
-
278
- length += 2 + boundary_length + 2 # b"--{boundary}\r\n"
279
- length += field_length
280
- length += 2 # b"\r\n"
281
-
282
- length += 2 + boundary_length + 4 # b"--{boundary}--\r\n"
283
- return length
284
-
285
- # Content stream interface.
286
-
287
- def get_headers(self) -> dict[str, str]:
288
- content_length = self.get_content_length()
289
- content_type = self.content_type
290
- if content_length is None:
291
- return {"Transfer-Encoding": "chunked", "Content-Type": content_type}
292
- return {"Content-Length": str(content_length), "Content-Type": content_type}
293
-
294
- def __iter__(self) -> typing.Iterator[bytes]:
295
- for chunk in self.iter_chunks():
296
- yield chunk
297
-
298
- async def __aiter__(self) -> typing.AsyncIterator[bytes]:
299
- for chunk in self.iter_chunks():
300
- yield chunk
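
The `_multipart.py` module deleted above is the streaming encoder behind the `data=` and `files=` arguments. A small sketch of the behaviour it provides, assuming a placeholder URL and an in-memory file:

```python
import httpx

# `files=` routes the request body through the multipart encoder.
request = httpx.Request(
    "POST",
    "https://example.org/upload",                                  # placeholder URL
    data={"description": "test upload"},                           # plain form field
    files={"file": ("hello.txt", b"hello world", "text/plain")},   # file field
)

# A random boundary is chosen, and Content-Length is computed up front
# because the field sizes are known.
print(request.headers["content-type"])   # multipart/form-data; boundary=...
print(request.headers["content-length"])

body = request.read()                    # materialise the multipart body
print(body[:40])                         # b'--<boundary>\r\nContent-Disposition: ...'
```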
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_status_codes.py DELETED
@@ -1,162 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from enum import IntEnum
4
-
5
- __all__ = ["codes"]
6
-
7
-
8
- class codes(IntEnum):
9
- """HTTP status codes and reason phrases
10
-
11
- Status codes from the following RFCs are all observed:
12
-
13
- * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
14
- * RFC 6585: Additional HTTP Status Codes
15
- * RFC 3229: Delta encoding in HTTP
16
- * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
17
- * RFC 5842: Binding Extensions to WebDAV
18
- * RFC 7238: Permanent Redirect
19
- * RFC 2295: Transparent Content Negotiation in HTTP
20
- * RFC 2774: An HTTP Extension Framework
21
- * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
22
- * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
23
- * RFC 7725: An HTTP Status Code to Report Legal Obstacles
24
- * RFC 8297: An HTTP Status Code for Indicating Hints
25
- * RFC 8470: Using Early Data in HTTP
26
- """
27
-
28
- def __new__(cls, value: int, phrase: str = "") -> codes:
29
- obj = int.__new__(cls, value)
30
- obj._value_ = value
31
-
32
- obj.phrase = phrase # type: ignore[attr-defined]
33
- return obj
34
-
35
- def __str__(self) -> str:
36
- return str(self.value)
37
-
38
- @classmethod
39
- def get_reason_phrase(cls, value: int) -> str:
40
- try:
41
- return codes(value).phrase # type: ignore
42
- except ValueError:
43
- return ""
44
-
45
- @classmethod
46
- def is_informational(cls, value: int) -> bool:
47
- """
48
- Returns `True` for 1xx status codes, `False` otherwise.
49
- """
50
- return 100 <= value <= 199
51
-
52
- @classmethod
53
- def is_success(cls, value: int) -> bool:
54
- """
55
- Returns `True` for 2xx status codes, `False` otherwise.
56
- """
57
- return 200 <= value <= 299
58
-
59
- @classmethod
60
- def is_redirect(cls, value: int) -> bool:
61
- """
62
- Returns `True` for 3xx status codes, `False` otherwise.
63
- """
64
- return 300 <= value <= 399
65
-
66
- @classmethod
67
- def is_client_error(cls, value: int) -> bool:
68
- """
69
- Returns `True` for 4xx status codes, `False` otherwise.
70
- """
71
- return 400 <= value <= 499
72
-
73
- @classmethod
74
- def is_server_error(cls, value: int) -> bool:
75
- """
76
- Returns `True` for 5xx status codes, `False` otherwise.
77
- """
78
- return 500 <= value <= 599
79
-
80
- @classmethod
81
- def is_error(cls, value: int) -> bool:
82
- """
83
- Returns `True` for 4xx or 5xx status codes, `False` otherwise.
84
- """
85
- return 400 <= value <= 599
86
-
87
- # informational
88
- CONTINUE = 100, "Continue"
89
- SWITCHING_PROTOCOLS = 101, "Switching Protocols"
90
- PROCESSING = 102, "Processing"
91
- EARLY_HINTS = 103, "Early Hints"
92
-
93
- # success
94
- OK = 200, "OK"
95
- CREATED = 201, "Created"
96
- ACCEPTED = 202, "Accepted"
97
- NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information"
98
- NO_CONTENT = 204, "No Content"
99
- RESET_CONTENT = 205, "Reset Content"
100
- PARTIAL_CONTENT = 206, "Partial Content"
101
- MULTI_STATUS = 207, "Multi-Status"
102
- ALREADY_REPORTED = 208, "Already Reported"
103
- IM_USED = 226, "IM Used"
104
-
105
- # redirection
106
- MULTIPLE_CHOICES = 300, "Multiple Choices"
107
- MOVED_PERMANENTLY = 301, "Moved Permanently"
108
- FOUND = 302, "Found"
109
- SEE_OTHER = 303, "See Other"
110
- NOT_MODIFIED = 304, "Not Modified"
111
- USE_PROXY = 305, "Use Proxy"
112
- TEMPORARY_REDIRECT = 307, "Temporary Redirect"
113
- PERMANENT_REDIRECT = 308, "Permanent Redirect"
114
-
115
- # client error
116
- BAD_REQUEST = 400, "Bad Request"
117
- UNAUTHORIZED = 401, "Unauthorized"
118
- PAYMENT_REQUIRED = 402, "Payment Required"
119
- FORBIDDEN = 403, "Forbidden"
120
- NOT_FOUND = 404, "Not Found"
121
- METHOD_NOT_ALLOWED = 405, "Method Not Allowed"
122
- NOT_ACCEPTABLE = 406, "Not Acceptable"
123
- PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required"
124
- REQUEST_TIMEOUT = 408, "Request Timeout"
125
- CONFLICT = 409, "Conflict"
126
- GONE = 410, "Gone"
127
- LENGTH_REQUIRED = 411, "Length Required"
128
- PRECONDITION_FAILED = 412, "Precondition Failed"
129
- REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large"
130
- REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long"
131
- UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type"
132
- REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable"
133
- EXPECTATION_FAILED = 417, "Expectation Failed"
134
- IM_A_TEAPOT = 418, "I'm a teapot"
135
- MISDIRECTED_REQUEST = 421, "Misdirected Request"
136
- UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity"
137
- LOCKED = 423, "Locked"
138
- FAILED_DEPENDENCY = 424, "Failed Dependency"
139
- TOO_EARLY = 425, "Too Early"
140
- UPGRADE_REQUIRED = 426, "Upgrade Required"
141
- PRECONDITION_REQUIRED = 428, "Precondition Required"
142
- TOO_MANY_REQUESTS = 429, "Too Many Requests"
143
- REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large"
144
- UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons"
145
-
146
- # server errors
147
- INTERNAL_SERVER_ERROR = 500, "Internal Server Error"
148
- NOT_IMPLEMENTED = 501, "Not Implemented"
149
- BAD_GATEWAY = 502, "Bad Gateway"
150
- SERVICE_UNAVAILABLE = 503, "Service Unavailable"
151
- GATEWAY_TIMEOUT = 504, "Gateway Timeout"
152
- HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported"
153
- VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates"
154
- INSUFFICIENT_STORAGE = 507, "Insufficient Storage"
155
- LOOP_DETECTED = 508, "Loop Detected"
156
- NOT_EXTENDED = 510, "Not Extended"
157
- NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required"
158
-
159
-
160
- # Include lower-case styles for `requests` compatibility.
161
- for code in codes:
162
- setattr(codes, code._name_.lower(), int(code))
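
The `_status_codes.py` module deleted above defines the `httpx.codes` enum used by the response helpers earlier in this diff. A brief usage sketch:

```python
import httpx

assert httpx.codes.NOT_FOUND == 404
assert httpx.codes.get_reason_phrase(404) == "Not Found"
assert httpx.codes.is_client_error(404)
assert httpx.codes.is_redirect(302) and not httpx.codes.is_error(302)

# Lower-case aliases are added by the loop at the end of the module,
# for `requests` compatibility.
assert httpx.codes.not_found == 404
```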
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__init__.py DELETED
@@ -1,15 +0,0 @@
1
- from .asgi import *
2
- from .base import *
3
- from .default import *
4
- from .mock import *
5
- from .wsgi import *
6
-
7
- __all__ = [
8
- "ASGITransport",
9
- "AsyncBaseTransport",
10
- "BaseTransport",
11
- "AsyncHTTPTransport",
12
- "HTTPTransport",
13
- "MockTransport",
14
- "WSGITransport",
15
- ]
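
The `_transports` package deleted above re-exports the transport classes listed in `__all__`. As a hedged sketch, `MockTransport` is the simplest to demonstrate: it answers requests from a plain handler function without any network I/O (the handler and base URL below are illustrative):

```python
import httpx

def handler(request: httpx.Request) -> httpx.Response:
    # Echo the requested path back as JSON.
    return httpx.Response(200, json={"path": request.url.path})

transport = httpx.MockTransport(handler)

with httpx.Client(transport=transport, base_url="http://testserver") as client:
    response = client.get("/ping")
    assert response.status_code == 200
    assert response.json() == {"path": "/ping"}
```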
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/__init__.cpython-310.pyc DELETED
Binary file (439 Bytes)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc DELETED
Binary file (5.21 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/base.cpython-310.pyc DELETED
Binary file (3.47 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/default.cpython-310.pyc DELETED
Binary file (10.1 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc DELETED
Binary file (1.45 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/__pycache__/wsgi.cpython-310.pyc DELETED
Binary file (5.26 kB)
 
emissary-ml/llm-scripts/fine-tuning/llama3/venv/lib/python3.10/site-packages/httpx/_transports/asgi.py DELETED
@@ -1,187 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import typing
4
-
5
- from .._models import Request, Response
6
- from .._types import AsyncByteStream
7
- from .base import AsyncBaseTransport
8
-
9
- if typing.TYPE_CHECKING: # pragma: no cover
10
- import asyncio
11
-
12
- import trio
13
-
14
- Event = typing.Union[asyncio.Event, trio.Event]
15
-
16
-
17
- _Message = typing.MutableMapping[str, typing.Any]
18
- _Receive = typing.Callable[[], typing.Awaitable[_Message]]
19
- _Send = typing.Callable[
20
- [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None]
21
- ]
22
- _ASGIApp = typing.Callable[
23
- [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
24
- ]
25
-
26
- __all__ = ["ASGITransport"]
27
-
28
-
29
- def is_running_trio() -> bool:
30
- try:
31
- # sniffio is a dependency of trio.
32
-
33
- # See https://github.com/python-trio/trio/issues/2802
34
- import sniffio
35
-
36
- if sniffio.current_async_library() == "trio":
37
- return True
38
- except ImportError: # pragma: nocover
39
- pass
40
-
41
- return False
42
-
43
-
44
- def create_event() -> Event:
45
- if is_running_trio():
46
- import trio
47
-
48
- return trio.Event()
49
-
50
- import asyncio
51
-
52
- return asyncio.Event()
53
-
54
-
55
- class ASGIResponseStream(AsyncByteStream):
56
- def __init__(self, body: list[bytes]) -> None:
57
- self._body = body
58
-
59
- async def __aiter__(self) -> typing.AsyncIterator[bytes]:
60
- yield b"".join(self._body)
61
-
62
-
63
- class ASGITransport(AsyncBaseTransport):
64
- """
65
- A custom AsyncTransport that handles sending requests directly to an ASGI app.
66
-
67
- ```python
68
- transport = httpx.ASGITransport(
69
- app=app,
70
- root_path="/submount",
71
- client=("1.2.3.4", 123)
72
- )
73
- client = httpx.AsyncClient(transport=transport)
74
- ```
75
-
76
- Arguments:
77
-
78
- * `app` - The ASGI application.
79
- * `raise_app_exceptions` - Boolean indicating if exceptions in the application
80
- should be raised. Defaults to `True`. Can be set to `False` for use cases
81
- such as testing the content of a client 500 response.
82
- * `root_path` - The root path on which the ASGI application should be mounted.
83
- * `client` - A two-tuple indicating the client IP and port of incoming requests.
84
- ```
85
- """
86
-
87
- def __init__(
88
- self,
89
- app: _ASGIApp,
90
- raise_app_exceptions: bool = True,
91
- root_path: str = "",
92
- client: tuple[str, int] = ("127.0.0.1", 123),
93
- ) -> None:
94
- self.app = app
95
- self.raise_app_exceptions = raise_app_exceptions
96
- self.root_path = root_path
97
- self.client = client
98
-
99
- async def handle_async_request(
100
- self,
101
- request: Request,
102
- ) -> Response:
103
- assert isinstance(request.stream, AsyncByteStream)
104
-
105
- # ASGI scope.
106
- scope = {
107
- "type": "http",
108
- "asgi": {"version": "3.0"},
109
- "http_version": "1.1",
110
- "method": request.method,
111
- "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
112
- "scheme": request.url.scheme,
113
- "path": request.url.path,
114
- "raw_path": request.url.raw_path.split(b"?")[0],
115
- "query_string": request.url.query,
116
- "server": (request.url.host, request.url.port),
117
- "client": self.client,
118
- "root_path": self.root_path,
119
- }
120
-
121
- # Request.
122
- request_body_chunks = request.stream.__aiter__()
123
- request_complete = False
124
-
125
- # Response.
126
- status_code = None
127
- response_headers = None
128
- body_parts = []
129
- response_started = False
130
- response_complete = create_event()
131
-
132
- # ASGI callables.
133
-
134
- async def receive() -> dict[str, typing.Any]:
135
- nonlocal request_complete
136
-
137
- if request_complete:
138
- await response_complete.wait()
139
- return {"type": "http.disconnect"}
140
-
141
- try:
142
- body = await request_body_chunks.__anext__()
143
- except StopAsyncIteration:
144
- request_complete = True
145
- return {"type": "http.request", "body": b"", "more_body": False}
146
- return {"type": "http.request", "body": body, "more_body": True}
147
-
148
- async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
149
- nonlocal status_code, response_headers, response_started
150
-
151
- if message["type"] == "http.response.start":
152
- assert not response_started
153
-
154
- status_code = message["status"]
155
- response_headers = message.get("headers", [])
156
- response_started = True
157
-
158
- elif message["type"] == "http.response.body":
159
- assert not response_complete.is_set()
160
- body = message.get("body", b"")
161
- more_body = message.get("more_body", False)
162
-
163
- if body and request.method != "HEAD":
164
- body_parts.append(body)
165
-
166
- if not more_body:
167
- response_complete.set()
168
-
169
- try:
170
- await self.app(scope, receive, send)
171
- except Exception: # noqa: PIE-786
172
- if self.raise_app_exceptions:
173
- raise
174
-
175
- response_complete.set()
176
- if status_code is None:
177
- status_code = 500
178
- if response_headers is None:
179
- response_headers = {}
180
-
181
- assert response_complete.is_set()
182
- assert status_code is not None
183
- assert response_headers is not None
184
-
185
- stream = ASGIResponseStream(body_parts)
186
-
187
- return Response(status_code, headers=response_headers, stream=stream)
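
The `ASGITransport` deleted above routes requests straight into an ASGI application in-process, as the constructor example in its docstring shows. A minimal end-to-end sketch (the ASGI app and base URL are illustrative, not part of this repository):

```python
import asyncio
import httpx

async def app(scope, receive, send):
    # Tiny ASGI app: answer every HTTP request with a plain-text 200.
    assert scope["type"] == "http"
    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(b"content-type", b"text/plain")],
    })
    await send({"type": "http.response.body", "body": b"Hello, ASGI!"})

async def main() -> None:
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
        response = await client.get("/")
        print(response.status_code, response.text)  # 200 Hello, ASGI!

asyncio.run(main())
```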