applied-ai-018 committed
Commit 0a5da9d · verified · 1 parent: e20593c

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50):
  1. ckpts/universal/global_step80/zero/10.mlp.dense_h_to_4h.weight/exp_avg.pt +3 -0
  2. ckpts/universal/global_step80/zero/16.attention.dense.weight/exp_avg.pt +3 -0
  3. ckpts/universal/global_step80/zero/26.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
  4. venv/lib/python3.10/site-packages/networkx/classes/coreviews.py +418 -0
  5. venv/lib/python3.10/site-packages/networkx/classes/reportviews.py +1438 -0
  6. venv/lib/python3.10/site-packages/networkx/classes/tests/__init__.py +0 -0
  7. venv/lib/python3.10/site-packages/networkx/classes/tests/dispatch_interface.py +194 -0
  8. venv/lib/python3.10/site-packages/networkx/classes/tests/historical_tests.py +474 -0
  9. venv/lib/python3.10/site-packages/networkx/classes/tests/test_digraph.py +331 -0
  10. venv/lib/python3.10/site-packages/networkx/classes/tests/test_digraph_historical.py +110 -0
  11. venv/lib/python3.10/site-packages/networkx/classes/tests/test_graph.py +920 -0
  12. venv/lib/python3.10/site-packages/networkx/classes/tests/test_graph_historical.py +12 -0
  13. venv/lib/python3.10/site-packages/networkx/classes/tests/test_graphviews.py +350 -0
  14. venv/lib/python3.10/site-packages/networkx/classes/tests/test_multidigraph.py +459 -0
  15. venv/lib/python3.10/site-packages/networkx/classes/tests/test_multigraph.py +528 -0
  16. venv/lib/python3.10/site-packages/networkx/classes/tests/test_reportviews.py +1427 -0
  17. venv/lib/python3.10/site-packages/networkx/classes/tests/test_special.py +131 -0
  18. venv/lib/python3.10/site-packages/networkx/classes/tests/test_subgraphviews.py +362 -0
  19. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_asciidoc.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_borderless.cpython-310.pyc +0 -0
  21. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_html.cpython-310.pyc +0 -0
  22. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_json.cpython-310.pyc +0 -0
  23. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_jsonlines.cpython-310.pyc +0 -0
  24. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_rst.cpython-310.pyc +0 -0
  25. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_toml.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_unicode.cpython-310.pyc +0 -0
  27. venv/lib/python3.10/site-packages/urllib3/__init__.py +211 -0
  28. venv/lib/python3.10/site-packages/urllib3/__pycache__/__init__.cpython-310.pyc +0 -0
  29. venv/lib/python3.10/site-packages/urllib3/__pycache__/_base_connection.cpython-310.pyc +0 -0
  30. venv/lib/python3.10/site-packages/urllib3/__pycache__/_collections.cpython-310.pyc +0 -0
  31. venv/lib/python3.10/site-packages/urllib3/__pycache__/_request_methods.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/urllib3/__pycache__/_version.cpython-310.pyc +0 -0
  33. venv/lib/python3.10/site-packages/urllib3/__pycache__/connection.cpython-310.pyc +0 -0
  34. venv/lib/python3.10/site-packages/urllib3/__pycache__/connectionpool.cpython-310.pyc +0 -0
  35. venv/lib/python3.10/site-packages/urllib3/__pycache__/exceptions.cpython-310.pyc +0 -0
  36. venv/lib/python3.10/site-packages/urllib3/__pycache__/fields.cpython-310.pyc +0 -0
  37. venv/lib/python3.10/site-packages/urllib3/__pycache__/http2.cpython-310.pyc +0 -0
  38. venv/lib/python3.10/site-packages/urllib3/__pycache__/poolmanager.cpython-310.pyc +0 -0
  39. venv/lib/python3.10/site-packages/urllib3/__pycache__/response.cpython-310.pyc +0 -0
  40. venv/lib/python3.10/site-packages/urllib3/_base_connection.py +172 -0
  41. venv/lib/python3.10/site-packages/urllib3/_collections.py +483 -0
  42. venv/lib/python3.10/site-packages/urllib3/_request_methods.py +279 -0
  43. venv/lib/python3.10/site-packages/urllib3/_version.py +4 -0
  44. venv/lib/python3.10/site-packages/urllib3/connection.py +930 -0
  45. venv/lib/python3.10/site-packages/urllib3/connectionpool.py +1186 -0
  46. venv/lib/python3.10/site-packages/urllib3/exceptions.py +321 -0
  47. venv/lib/python3.10/site-packages/urllib3/fields.py +341 -0
  48. venv/lib/python3.10/site-packages/urllib3/filepost.py +89 -0
  49. venv/lib/python3.10/site-packages/urllib3/http2.py +229 -0
  50. venv/lib/python3.10/site-packages/urllib3/poolmanager.py +638 -0
ckpts/universal/global_step80/zero/10.mlp.dense_h_to_4h.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b82dfe572398ce2278a6e8d3b9b331cf683d1e29803f71d551fd7165f273ceac
+size 33555612
ckpts/universal/global_step80/zero/16.attention.dense.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6424091a23e4e1ca0c61fa49fd712ea7ea65a5a2b09aa69beb6c48023a1f9a07
+size 16778396
ckpts/universal/global_step80/zero/26.attention.query_key_value.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:47ebe72cc594178cd1f8712d2d0b7e1b42439ebaf6bf0610023f44b3804662ca
+size 50332843
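The three hunks above are Git LFS pointer files: the repository stores only the LFS spec version, a sha256 object id, and the object size in bytes, while the tensor data itself lives in LFS storage. As a rough illustration, a downloaded shard can be checked against the first pointer like this (the helper below is hypothetical and not part of this commit; the path and expected values are copied from the hunk above):

# Sketch: verify a locally downloaded file against a Git LFS pointer (oid + size).
import hashlib
from pathlib import Path

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the file's byte size and SHA-256 digest match the pointer."""
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

ok = verify_lfs_object(
    "ckpts/universal/global_step80/zero/10.mlp.dense_h_to_4h.weight/exp_avg.pt",
    "b82dfe572398ce2278a6e8d3b9b331cf683d1e29803f71d551fd7165f273ceac",
    33555612,
)
print("pointer matches:", ok)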
venv/lib/python3.10/site-packages/networkx/classes/coreviews.py ADDED
@@ -0,0 +1,418 @@
+"""Views of core data structures such as nested Mappings (e.g. dict-of-dicts).
+These ``Views`` often restrict element access, with either the entire view or
+layers of nested mappings being read-only.
+"""
+from collections.abc import Mapping
+
+__all__ = [
+    "AtlasView",
+    "AdjacencyView",
+    "MultiAdjacencyView",
+    "UnionAtlas",
+    "UnionAdjacency",
+    "UnionMultiInner",
+    "UnionMultiAdjacency",
+    "FilterAtlas",
+    "FilterAdjacency",
+    "FilterMultiInner",
+    "FilterMultiAdjacency",
+]
+
+
+class AtlasView(Mapping):
+    """An AtlasView is a Read-only Mapping of Mappings.
+
+    It is a View into a dict-of-dict data structure.
+    The inner level of dict is read-write. But the
+    outer level is read-only.
+
+    See Also
+    ========
+    AdjacencyView: View into dict-of-dict-of-dict
+    MultiAdjacencyView: View into dict-of-dict-of-dict-of-dict
+    """
+
+    __slots__ = ("_atlas",)
+
+    def __getstate__(self):
+        return {"_atlas": self._atlas}
+
+    def __setstate__(self, state):
+        self._atlas = state["_atlas"]
+
+    def __init__(self, d):
+        self._atlas = d
+
+    def __len__(self):
+        return len(self._atlas)
+
+    def __iter__(self):
+        return iter(self._atlas)
+
+    def __getitem__(self, key):
+        return self._atlas[key]
+
+    def copy(self):
+        return {n: self[n].copy() for n in self._atlas}
+
+    def __str__(self):
+        return str(self._atlas)  # {nbr: self[nbr] for nbr in self})
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self._atlas!r})"
+
+
+class AdjacencyView(AtlasView):
+    """An AdjacencyView is a Read-only Map of Maps of Maps.
+
+    It is a View into a dict-of-dict-of-dict data structure.
+    The inner level of dict is read-write. But the
+    outer levels are read-only.
+
+    See Also
+    ========
+    AtlasView: View into dict-of-dict
+    MultiAdjacencyView: View into dict-of-dict-of-dict-of-dict
+    """
+
+    __slots__ = ()  # Still uses AtlasView slots names _atlas
+
+    def __getitem__(self, name):
+        return AtlasView(self._atlas[name])
+
+    def copy(self):
+        return {n: self[n].copy() for n in self._atlas}
+
+
+class MultiAdjacencyView(AdjacencyView):
+    """An MultiAdjacencyView is a Read-only Map of Maps of Maps of Maps.
+
+    It is a View into a dict-of-dict-of-dict-of-dict data structure.
+    The inner level of dict is read-write. But the
+    outer levels are read-only.
+
+    See Also
+    ========
+    AtlasView: View into dict-of-dict
+    AdjacencyView: View into dict-of-dict-of-dict
+    """
+
+    __slots__ = ()  # Still uses AtlasView slots names _atlas
+
+    def __getitem__(self, name):
+        return AdjacencyView(self._atlas[name])
+
+    def copy(self):
+        return {n: self[n].copy() for n in self._atlas}
+
+
+class UnionAtlas(Mapping):
+    """A read-only union of two atlases (dict-of-dict).
+
+    The two dict-of-dicts represent the inner dict of
+    an Adjacency: `G.succ[node]` and `G.pred[node]`.
+    The inner level of dict of both hold attribute key:value
+    pairs and is read-write. But the outer level is read-only.
+
+    See Also
+    ========
+    UnionAdjacency: View into dict-of-dict-of-dict
+    UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict
+    """
+
+    __slots__ = ("_succ", "_pred")
+
+    def __getstate__(self):
+        return {"_succ": self._succ, "_pred": self._pred}
+
+    def __setstate__(self, state):
+        self._succ = state["_succ"]
+        self._pred = state["_pred"]
+
+    def __init__(self, succ, pred):
+        self._succ = succ
+        self._pred = pred
+
+    def __len__(self):
+        return len(self._succ.keys() | self._pred.keys())
+
+    def __iter__(self):
+        return iter(set(self._succ.keys()) | set(self._pred.keys()))
+
+    def __getitem__(self, key):
+        try:
+            return self._succ[key]
+        except KeyError:
+            return self._pred[key]
+
+    def copy(self):
+        result = {nbr: dd.copy() for nbr, dd in self._succ.items()}
+        for nbr, dd in self._pred.items():
+            if nbr in result:
+                result[nbr].update(dd)
+            else:
+                result[nbr] = dd.copy()
+        return result
+
+    def __str__(self):
+        return str({nbr: self[nbr] for nbr in self})
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})"
+
+
+class UnionAdjacency(Mapping):
+    """A read-only union of dict Adjacencies as a Map of Maps of Maps.
+
+    The two input dict-of-dict-of-dicts represent the union of
+    `G.succ` and `G.pred`. Return values are UnionAtlas
+    The inner level of dict is read-write. But the
+    middle and outer levels are read-only.
+
+    succ : a dict-of-dict-of-dict {node: nbrdict}
+    pred : a dict-of-dict-of-dict {node: nbrdict}
+        The keys for the two dicts should be the same
+
+    See Also
+    ========
+    UnionAtlas: View into dict-of-dict
+    UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict
+    """
+
+    __slots__ = ("_succ", "_pred")
+
+    def __getstate__(self):
+        return {"_succ": self._succ, "_pred": self._pred}
+
+    def __setstate__(self, state):
+        self._succ = state["_succ"]
+        self._pred = state["_pred"]
+
+    def __init__(self, succ, pred):
+        # keys must be the same for two input dicts
+        assert len(set(succ.keys()) ^ set(pred.keys())) == 0
+        self._succ = succ
+        self._pred = pred
+
+    def __len__(self):
+        return len(self._succ)  # length of each dict should be the same
+
+    def __iter__(self):
+        return iter(self._succ)
+
+    def __getitem__(self, nbr):
+        return UnionAtlas(self._succ[nbr], self._pred[nbr])
+
+    def copy(self):
+        return {n: self[n].copy() for n in self._succ}
+
+    def __str__(self):
+        return str({nbr: self[nbr] for nbr in self})
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})"
+
+
+class UnionMultiInner(UnionAtlas):
+    """A read-only union of two inner dicts of MultiAdjacencies.
+
+    The two input dict-of-dict-of-dicts represent the union of
+    `G.succ[node]` and `G.pred[node]` for MultiDiGraphs.
+    Return values are UnionAtlas.
+    The inner level of dict is read-write. But the outer levels are read-only.
+
+    See Also
+    ========
+    UnionAtlas: View into dict-of-dict
+    UnionAdjacency: View into dict-of-dict-of-dict
+    UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict
+    """
+
+    __slots__ = ()  # Still uses UnionAtlas slots names _succ, _pred
+
+    def __getitem__(self, node):
+        in_succ = node in self._succ
+        in_pred = node in self._pred
+        if in_succ:
+            if in_pred:
+                return UnionAtlas(self._succ[node], self._pred[node])
+            return UnionAtlas(self._succ[node], {})
+        return UnionAtlas({}, self._pred[node])
+
+    def copy(self):
+        nodes = set(self._succ.keys()) | set(self._pred.keys())
+        return {n: self[n].copy() for n in nodes}
+
+
+class UnionMultiAdjacency(UnionAdjacency):
+    """A read-only union of two dict MultiAdjacencies.
+
+    The two input dict-of-dict-of-dict-of-dicts represent the union of
+    `G.succ` and `G.pred` for MultiDiGraphs. Return values are UnionAdjacency.
+    The inner level of dict is read-write. But the outer levels are read-only.
+
+    See Also
+    ========
+    UnionAtlas: View into dict-of-dict
+    UnionMultiInner: View into dict-of-dict-of-dict
+    """
+
+    __slots__ = ()  # Still uses UnionAdjacency slots names _succ, _pred
+
+    def __getitem__(self, node):
+        return UnionMultiInner(self._succ[node], self._pred[node])
+
+
+class FilterAtlas(Mapping):  # nodedict, nbrdict, keydict
+    """A read-only Mapping of Mappings with filtering criteria for nodes.
+
+    It is a view into a dict-of-dict data structure, and it selects only
+    nodes that meet the criteria defined by ``NODE_OK``.
+
+    See Also
+    ========
+    FilterAdjacency
+    FilterMultiInner
+    FilterMultiAdjacency
+    """
+
+    def __init__(self, d, NODE_OK):
+        self._atlas = d
+        self.NODE_OK = NODE_OK
+
+    def __len__(self):
+        return sum(1 for n in self)
+
+    def __iter__(self):
+        try:  # check that NODE_OK has attr 'nodes'
+            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
+        except AttributeError:
+            node_ok_shorter = False
+        if node_ok_shorter:
+            return (n for n in self.NODE_OK.nodes if n in self._atlas)
+        return (n for n in self._atlas if self.NODE_OK(n))
+
+    def __getitem__(self, key):
+        if key in self._atlas and self.NODE_OK(key):
+            return self._atlas[key]
+        raise KeyError(f"Key {key} not found")
+
+    def __str__(self):
+        return str({nbr: self[nbr] for nbr in self})
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self._atlas!r}, {self.NODE_OK!r})"
+
+
+class FilterAdjacency(Mapping):  # edgedict
+    """A read-only Mapping of Mappings with filtering criteria for nodes and edges.
+
+    It is a view into a dict-of-dict-of-dict data structure, and it selects nodes
+    and edges that satisfy specific criteria defined by ``NODE_OK`` and ``EDGE_OK``,
+    respectively.
+
+    See Also
+    ========
+    FilterAtlas
+    FilterMultiInner
+    FilterMultiAdjacency
+    """
+
+    def __init__(self, d, NODE_OK, EDGE_OK):
+        self._atlas = d
+        self.NODE_OK = NODE_OK
+        self.EDGE_OK = EDGE_OK
+
+    def __len__(self):
+        return sum(1 for n in self)
+
+    def __iter__(self):
+        try:  # check that NODE_OK has attr 'nodes'
+            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
+        except AttributeError:
+            node_ok_shorter = False
+        if node_ok_shorter:
+            return (n for n in self.NODE_OK.nodes if n in self._atlas)
+        return (n for n in self._atlas if self.NODE_OK(n))
+
+    def __getitem__(self, node):
+        if node in self._atlas and self.NODE_OK(node):
+
+            def new_node_ok(nbr):
+                return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr)
+
+            return FilterAtlas(self._atlas[node], new_node_ok)
+        raise KeyError(f"Key {node} not found")
+
+    def __str__(self):
+        return str({nbr: self[nbr] for nbr in self})
+
+    def __repr__(self):
+        name = self.__class__.__name__
+        return f"{name}({self._atlas!r}, {self.NODE_OK!r}, {self.EDGE_OK!r})"
+
+
+class FilterMultiInner(FilterAdjacency):  # muliedge_seconddict
+    """A read-only Mapping of Mappings with filtering criteria for nodes and edges.
+
+    It is a view into a dict-of-dict-of-dict-of-dict data structure, and it selects nodes
+    and edges that meet specific criteria defined by ``NODE_OK`` and ``EDGE_OK``.
+
+    See Also
+    ========
+    FilterAtlas
+    FilterAdjacency
+    FilterMultiAdjacency
+    """
+
+    def __iter__(self):
+        try:  # check that NODE_OK has attr 'nodes'
+            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
+        except AttributeError:
+            node_ok_shorter = False
+        if node_ok_shorter:
+            my_nodes = (n for n in self.NODE_OK.nodes if n in self._atlas)
+        else:
+            my_nodes = (n for n in self._atlas if self.NODE_OK(n))
+        for n in my_nodes:
+            some_keys_ok = False
+            for key in self._atlas[n]:
+                if self.EDGE_OK(n, key):
+                    some_keys_ok = True
+                    break
+            if some_keys_ok is True:
+                yield n
+
+    def __getitem__(self, nbr):
+        if nbr in self._atlas and self.NODE_OK(nbr):
+
+            def new_node_ok(key):
+                return self.EDGE_OK(nbr, key)
+
+            return FilterAtlas(self._atlas[nbr], new_node_ok)
+        raise KeyError(f"Key {nbr} not found")
+
+
+class FilterMultiAdjacency(FilterAdjacency):  # multiedgedict
+    """A read-only Mapping of Mappings with filtering criteria
+    for nodes and edges.
+
+    It is a view into a dict-of-dict-of-dict-of-dict data structure,
+    and it selects nodes and edges that satisfy specific criteria
+    defined by ``NODE_OK`` and ``EDGE_OK``, respectively.
+
+    See Also
+    ========
+    FilterAtlas
+    FilterAdjacency
+    FilterMultiInner
+    """
+
+    def __getitem__(self, node):
+        if node in self._atlas and self.NODE_OK(node):
+
+            def edge_ok(nbr, key):
+                return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr, key)
+
+            return FilterMultiInner(self._atlas[node], self.NODE_OK, edge_ok)
+        raise KeyError(f"Key {node} not found")
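coreviews.py supplies the nested read-only mappings behind G.adj, G.succ/G.pred and filtered subgraph views. A minimal sketch of how these layers behave in practice is shown below; it assumes the vendored networkx imports cleanly and is illustrative only, not part of the commit:

# Sketch: outer adjacency layers are read-only, innermost attribute dicts are writable.
import networkx as nx

G = nx.Graph()
G.add_edge(1, 2, weight=3)

adj = G.adj               # AdjacencyView (dict-of-dict-of-dict view)
print(adj[1])             # {2: {'weight': 3}}  (an AtlasView over node 1's neighbors)

adj[1][2]["weight"] = 7   # the innermost attribute dict is read-write
print(G.edges[1, 2])      # {'weight': 7}

try:
    adj[1] = {}           # the outer layers reject assignment
except TypeError as err:
    print("read-only:", err)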
venv/lib/python3.10/site-packages/networkx/classes/reportviews.py ADDED
@@ -0,0 +1,1438 @@
+"""
+View Classes provide node, edge and degree "views" of a graph.
+
+Views for nodes, edges and degree are provided for all base graph classes.
+A view means a read-only object that is quick to create, automatically
+updated when the graph changes, and provides basic access like `n in V`,
+`for n in V`, `V[n]` and sometimes set operations.
+
+The views are read-only iterable containers that are updated as the
+graph is updated. As with dicts, the graph should not be updated
+while iterating through the view. Views can be iterated multiple times.
+
+Edge and Node views also allow data attribute lookup.
+The resulting attribute dict is writable as `G.edges[3, 4]['color']='red'`
+Degree views allow lookup of degree values for single nodes.
+Weighted degree is supported with the `weight` argument.
+
+NodeView
+========
+
+`V = G.nodes` (or `V = G.nodes()`) allows `len(V)`, `n in V`, set
+operations e.g. "G.nodes & H.nodes", and `dd = G.nodes[n]`, where
+`dd` is the node data dict. Iteration is over the nodes by default.
+
+NodeDataView
+============
+
+To iterate over (node, data) pairs, use arguments to `G.nodes()`
+to create a DataView e.g. `DV = G.nodes(data='color', default='red')`.
+The DataView iterates as `for n, color in DV` and allows
+`(n, 'red') in DV`. Using `DV = G.nodes(data=True)`, the DataViews
+use the full datadict in writeable form also allowing contain testing as
+`(n, {'color': 'red'}) in VD`. DataViews allow set operations when
+data attributes are hashable.
+
+DegreeView
+==========
+
+`V = G.degree` allows iteration over (node, degree) pairs as well
+as lookup: `deg=V[n]`. There are many flavors of DegreeView
+for In/Out/Directed/Multi. For Directed Graphs, `G.degree`
+counts both in and out going edges. `G.out_degree` and
+`G.in_degree` count only specific directions.
+Weighted degree using edge data attributes is provide via
+`V = G.degree(weight='attr_name')` where any string with the
+attribute name can be used. `weight=None` is the default.
+No set operations are implemented for degrees, use NodeView.
+
+The argument `nbunch` restricts iteration to nodes in nbunch.
+The DegreeView can still lookup any node even if nbunch is specified.
+
+EdgeView
+========
+
+`V = G.edges` or `V = G.edges()` allows iteration over edges as well as
+`e in V`, set operations and edge data lookup `dd = G.edges[2, 3]`.
+Iteration is over 2-tuples `(u, v)` for Graph/DiGraph. For multigraphs
+edges 3-tuples `(u, v, key)` are the default but 2-tuples can be obtained
+via `V = G.edges(keys=False)`.
+
+Set operations for directed graphs treat the edges as a set of 2-tuples.
+For undirected graphs, 2-tuples are not a unique representation of edges.
+So long as the set being compared to contains unique representations
+of its edges, the set operations will act as expected. If the other
+set contains both `(0, 1)` and `(1, 0)` however, the result of set
+operations may contain both representations of the same edge.
+
+EdgeDataView
+============
+
+Edge data can be reported using an EdgeDataView typically created
+by calling an EdgeView: `DV = G.edges(data='weight', default=1)`.
+The EdgeDataView allows iteration over edge tuples, membership checking
+but no set operations.
+
+Iteration depends on `data` and `default` and for multigraph `keys`
+If `data is False` (the default) then iterate over 2-tuples `(u, v)`.
+If `data is True` iterate over 3-tuples `(u, v, datadict)`.
+Otherwise iterate over `(u, v, datadict.get(data, default))`.
+For Multigraphs, if `keys is True`, replace `u, v` with `u, v, key`
+to create 3-tuples and 4-tuples.
+
+The argument `nbunch` restricts edges to those incident to nodes in nbunch.
+"""
+from collections.abc import Mapping, Set
+
+import networkx as nx
+
+__all__ = [
+    "NodeView",
+    "NodeDataView",
+    "EdgeView",
+    "OutEdgeView",
+    "InEdgeView",
+    "EdgeDataView",
+    "OutEdgeDataView",
+    "InEdgeDataView",
+    "MultiEdgeView",
+    "OutMultiEdgeView",
+    "InMultiEdgeView",
+    "MultiEdgeDataView",
+    "OutMultiEdgeDataView",
+    "InMultiEdgeDataView",
+    "DegreeView",
+    "DiDegreeView",
+    "InDegreeView",
+    "OutDegreeView",
+    "MultiDegreeView",
+    "DiMultiDegreeView",
+    "InMultiDegreeView",
+    "OutMultiDegreeView",
+]
113
+
114
+
115
+ # NodeViews
116
+ class NodeView(Mapping, Set):
117
+ """A NodeView class to act as G.nodes for a NetworkX Graph
118
+
119
+ Set operations act on the nodes without considering data.
120
+ Iteration is over nodes. Node data can be looked up like a dict.
121
+ Use NodeDataView to iterate over node data or to specify a data
122
+ attribute for lookup. NodeDataView is created by calling the NodeView.
123
+
124
+ Parameters
125
+ ----------
126
+ graph : NetworkX graph-like class
127
+
128
+ Examples
129
+ --------
130
+ >>> G = nx.path_graph(3)
131
+ >>> NV = G.nodes()
132
+ >>> 2 in NV
133
+ True
134
+ >>> for n in NV:
135
+ ... print(n)
136
+ 0
137
+ 1
138
+ 2
139
+ >>> assert NV & {1, 2, 3} == {1, 2}
140
+
141
+ >>> G.add_node(2, color="blue")
142
+ >>> NV[2]
143
+ {'color': 'blue'}
144
+ >>> G.add_node(8, color="red")
145
+ >>> NDV = G.nodes(data=True)
146
+ >>> (2, NV[2]) in NDV
147
+ True
148
+ >>> for n, dd in NDV:
149
+ ... print((n, dd.get("color", "aqua")))
150
+ (0, 'aqua')
151
+ (1, 'aqua')
152
+ (2, 'blue')
153
+ (8, 'red')
154
+ >>> NDV[2] == NV[2]
155
+ True
156
+
157
+ >>> NVdata = G.nodes(data="color", default="aqua")
158
+ >>> (2, NVdata[2]) in NVdata
159
+ True
160
+ >>> for n, dd in NVdata:
161
+ ... print((n, dd))
162
+ (0, 'aqua')
163
+ (1, 'aqua')
164
+ (2, 'blue')
165
+ (8, 'red')
166
+ >>> NVdata[2] == NV[2] # NVdata gets 'color', NV gets datadict
167
+ False
168
+ """
169
+
170
+ __slots__ = ("_nodes",)
171
+
172
+ def __getstate__(self):
173
+ return {"_nodes": self._nodes}
174
+
175
+ def __setstate__(self, state):
176
+ self._nodes = state["_nodes"]
177
+
178
+ def __init__(self, graph):
179
+ self._nodes = graph._node
180
+
181
+ # Mapping methods
182
+ def __len__(self):
183
+ return len(self._nodes)
184
+
185
+ def __iter__(self):
186
+ return iter(self._nodes)
187
+
188
+ def __getitem__(self, n):
189
+ if isinstance(n, slice):
190
+ raise nx.NetworkXError(
191
+ f"{type(self).__name__} does not support slicing, "
192
+ f"try list(G.nodes)[{n.start}:{n.stop}:{n.step}]"
193
+ )
194
+ return self._nodes[n]
195
+
196
+ # Set methods
197
+ def __contains__(self, n):
198
+ return n in self._nodes
199
+
200
+ @classmethod
201
+ def _from_iterable(cls, it):
202
+ return set(it)
203
+
204
+ # DataView method
205
+ def __call__(self, data=False, default=None):
206
+ if data is False:
207
+ return self
208
+ return NodeDataView(self._nodes, data, default)
209
+
210
+ def data(self, data=True, default=None):
211
+ """
212
+ Return a read-only view of node data.
213
+
214
+ Parameters
215
+ ----------
216
+ data : bool or node data key, default=True
217
+ If ``data=True`` (the default), return a `NodeDataView` object that
218
+ maps each node to *all* of its attributes. `data` may also be an
219
+ arbitrary key, in which case the `NodeDataView` maps each node to
220
+ the value for the keyed attribute. In this case, if a node does
221
+ not have the `data` attribute, the `default` value is used.
222
+ default : object, default=None
223
+ The value used when a node does not have a specific attribute.
224
+
225
+ Returns
226
+ -------
227
+ NodeDataView
228
+ The layout of the returned NodeDataView depends on the value of the
229
+ `data` parameter.
230
+
231
+ Notes
232
+ -----
233
+ If ``data=False``, returns a `NodeView` object without data.
234
+
235
+ See Also
236
+ --------
237
+ NodeDataView
238
+
239
+ Examples
240
+ --------
241
+ >>> G = nx.Graph()
242
+ >>> G.add_nodes_from(
243
+ ... [
244
+ ... (0, {"color": "red", "weight": 10}),
245
+ ... (1, {"color": "blue"}),
246
+ ... (2, {"color": "yellow", "weight": 2}),
247
+ ... ]
248
+ ... )
249
+
250
+ Accessing node data with ``data=True`` (the default) returns a
251
+ NodeDataView mapping each node to all of its attributes:
252
+
253
+ >>> G.nodes.data()
254
+ NodeDataView({0: {'color': 'red', 'weight': 10}, 1: {'color': 'blue'}, 2: {'color': 'yellow', 'weight': 2}})
255
+
256
+ If `data` represents a key in the node attribute dict, a NodeDataView mapping
257
+ the nodes to the value for that specific key is returned:
258
+
259
+ >>> G.nodes.data("color")
260
+ NodeDataView({0: 'red', 1: 'blue', 2: 'yellow'}, data='color')
261
+
262
+ If a specific key is not found in an attribute dict, the value specified
263
+ by `default` is returned:
264
+
265
+ >>> G.nodes.data("weight", default=-999)
266
+ NodeDataView({0: 10, 1: -999, 2: 2}, data='weight')
267
+
268
+ Note that there is no check that the `data` key is in any of the
269
+ node attribute dictionaries:
270
+
271
+ >>> G.nodes.data("height")
272
+ NodeDataView({0: None, 1: None, 2: None}, data='height')
273
+ """
274
+ if data is False:
275
+ return self
276
+ return NodeDataView(self._nodes, data, default)
277
+
278
+ def __str__(self):
279
+ return str(list(self))
280
+
281
+ def __repr__(self):
282
+ return f"{self.__class__.__name__}({tuple(self)})"
283
+
284
+
285
+ class NodeDataView(Set):
286
+ """A DataView class for nodes of a NetworkX Graph
287
+
288
+ The main use for this class is to iterate through node-data pairs.
289
+ The data can be the entire data-dictionary for each node, or it
290
+ can be a specific attribute (with default) for each node.
291
+ Set operations are enabled with NodeDataView, but don't work in
292
+ cases where the data is not hashable. Use with caution.
293
+ Typically, set operations on nodes use NodeView, not NodeDataView.
294
+ That is, they use `G.nodes` instead of `G.nodes(data='foo')`.
295
+
296
+ Parameters
297
+ ==========
298
+ graph : NetworkX graph-like class
299
+ data : bool or string (default=False)
300
+ default : object (default=None)
301
+ """
302
+
303
+ __slots__ = ("_nodes", "_data", "_default")
304
+
305
+ def __getstate__(self):
306
+ return {"_nodes": self._nodes, "_data": self._data, "_default": self._default}
307
+
308
+ def __setstate__(self, state):
309
+ self._nodes = state["_nodes"]
310
+ self._data = state["_data"]
311
+ self._default = state["_default"]
312
+
313
+ def __init__(self, nodedict, data=False, default=None):
314
+ self._nodes = nodedict
315
+ self._data = data
316
+ self._default = default
317
+
318
+ @classmethod
319
+ def _from_iterable(cls, it):
320
+ try:
321
+ return set(it)
322
+ except TypeError as err:
323
+ if "unhashable" in str(err):
324
+ msg = " : Could be b/c data=True or your values are unhashable"
325
+ raise TypeError(str(err) + msg) from err
326
+ raise
327
+
328
+ def __len__(self):
329
+ return len(self._nodes)
330
+
331
+ def __iter__(self):
332
+ data = self._data
333
+ if data is False:
334
+ return iter(self._nodes)
335
+ if data is True:
336
+ return iter(self._nodes.items())
337
+ return (
338
+ (n, dd[data] if data in dd else self._default)
339
+ for n, dd in self._nodes.items()
340
+ )
341
+
342
+ def __contains__(self, n):
343
+ try:
344
+ node_in = n in self._nodes
345
+ except TypeError:
346
+ n, d = n
347
+ return n in self._nodes and self[n] == d
348
+ if node_in is True:
349
+ return node_in
350
+ try:
351
+ n, d = n
352
+ except (TypeError, ValueError):
353
+ return False
354
+ return n in self._nodes and self[n] == d
355
+
356
+ def __getitem__(self, n):
357
+ if isinstance(n, slice):
358
+ raise nx.NetworkXError(
359
+ f"{type(self).__name__} does not support slicing, "
360
+ f"try list(G.nodes.data())[{n.start}:{n.stop}:{n.step}]"
361
+ )
362
+ ddict = self._nodes[n]
363
+ data = self._data
364
+ if data is False or data is True:
365
+ return ddict
366
+ return ddict[data] if data in ddict else self._default
367
+
368
+ def __str__(self):
369
+ return str(list(self))
370
+
371
+ def __repr__(self):
372
+ name = self.__class__.__name__
373
+ if self._data is False:
374
+ return f"{name}({tuple(self)})"
375
+ if self._data is True:
376
+ return f"{name}({dict(self)})"
377
+ return f"{name}({dict(self)}, data={self._data!r})"
378
+
379
+
380
+ # DegreeViews
381
+ class DiDegreeView:
382
+ """A View class for degree of nodes in a NetworkX Graph
383
+
384
+ The functionality is like dict.items() with (node, degree) pairs.
385
+ Additional functionality includes read-only lookup of node degree,
386
+ and calling with optional features nbunch (for only a subset of nodes)
387
+ and weight (use edge weights to compute degree).
388
+
389
+ Parameters
390
+ ==========
391
+ graph : NetworkX graph-like class
392
+ nbunch : node, container of nodes, or None meaning all nodes (default=None)
393
+ weight : bool or string (default=None)
394
+
395
+ Notes
396
+ -----
397
+ DegreeView can still lookup any node even if nbunch is specified.
398
+
399
+ Examples
400
+ --------
401
+ >>> G = nx.path_graph(3)
402
+ >>> DV = G.degree()
403
+ >>> assert DV[2] == 1
404
+ >>> assert sum(deg for n, deg in DV) == 4
405
+
406
+ >>> DVweight = G.degree(weight="span")
407
+ >>> G.add_edge(1, 2, span=34)
408
+ >>> DVweight[2]
409
+ 34
410
+ >>> DVweight[0] # default edge weight is 1
411
+ 1
412
+ >>> sum(span for n, span in DVweight) # sum weighted degrees
413
+ 70
414
+
415
+ >>> DVnbunch = G.degree(nbunch=(1, 2))
416
+ >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only
417
+ """
418
+
419
+ def __init__(self, G, nbunch=None, weight=None):
420
+ self._graph = G
421
+ self._succ = G._succ if hasattr(G, "_succ") else G._adj
422
+ self._pred = G._pred if hasattr(G, "_pred") else G._adj
423
+ self._nodes = self._succ if nbunch is None else list(G.nbunch_iter(nbunch))
424
+ self._weight = weight
425
+
426
+ def __call__(self, nbunch=None, weight=None):
427
+ if nbunch is None:
428
+ if weight == self._weight:
429
+ return self
430
+ return self.__class__(self._graph, None, weight)
431
+ try:
432
+ if nbunch in self._nodes:
433
+ if weight == self._weight:
434
+ return self[nbunch]
435
+ return self.__class__(self._graph, None, weight)[nbunch]
436
+ except TypeError:
437
+ pass
438
+ return self.__class__(self._graph, nbunch, weight)
439
+
440
+ def __getitem__(self, n):
441
+ weight = self._weight
442
+ succs = self._succ[n]
443
+ preds = self._pred[n]
444
+ if weight is None:
445
+ return len(succs) + len(preds)
446
+ return sum(dd.get(weight, 1) for dd in succs.values()) + sum(
447
+ dd.get(weight, 1) for dd in preds.values()
448
+ )
449
+
450
+ def __iter__(self):
451
+ weight = self._weight
452
+ if weight is None:
453
+ for n in self._nodes:
454
+ succs = self._succ[n]
455
+ preds = self._pred[n]
456
+ yield (n, len(succs) + len(preds))
457
+ else:
458
+ for n in self._nodes:
459
+ succs = self._succ[n]
460
+ preds = self._pred[n]
461
+ deg = sum(dd.get(weight, 1) for dd in succs.values()) + sum(
462
+ dd.get(weight, 1) for dd in preds.values()
463
+ )
464
+ yield (n, deg)
465
+
466
+ def __len__(self):
467
+ return len(self._nodes)
468
+
469
+ def __str__(self):
470
+ return str(list(self))
471
+
472
+ def __repr__(self):
473
+ return f"{self.__class__.__name__}({dict(self)})"
474
+
475
+
476
+ class DegreeView(DiDegreeView):
477
+ """A DegreeView class to act as G.degree for a NetworkX Graph
478
+
479
+ Typical usage focuses on iteration over `(node, degree)` pairs.
480
+ The degree is by default the number of edges incident to the node.
481
+ Optional argument `weight` enables weighted degree using the edge
482
+ attribute named in the `weight` argument. Reporting and iteration
483
+ can also be restricted to a subset of nodes using `nbunch`.
484
+
485
+ Additional functionality include node lookup so that `G.degree[n]`
486
+ reported the (possibly weighted) degree of node `n`. Calling the
487
+ view creates a view with different arguments `nbunch` or `weight`.
488
+
489
+ Parameters
490
+ ==========
491
+ graph : NetworkX graph-like class
492
+ nbunch : node, container of nodes, or None meaning all nodes (default=None)
493
+ weight : string or None (default=None)
494
+
495
+ Notes
496
+ -----
497
+ DegreeView can still lookup any node even if nbunch is specified.
498
+
499
+ Examples
500
+ --------
501
+ >>> G = nx.path_graph(3)
502
+ >>> DV = G.degree()
503
+ >>> assert DV[2] == 1
504
+ >>> assert G.degree[2] == 1
505
+ >>> assert sum(deg for n, deg in DV) == 4
506
+
507
+ >>> DVweight = G.degree(weight="span")
508
+ >>> G.add_edge(1, 2, span=34)
509
+ >>> DVweight[2]
510
+ 34
511
+ >>> DVweight[0] # default edge weight is 1
512
+ 1
513
+ >>> sum(span for n, span in DVweight) # sum weighted degrees
514
+ 70
515
+
516
+ >>> DVnbunch = G.degree(nbunch=(1, 2))
517
+ >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only
518
+ """
519
+
520
+ def __getitem__(self, n):
521
+ weight = self._weight
522
+ nbrs = self._succ[n]
523
+ if weight is None:
524
+ return len(nbrs) + (n in nbrs)
525
+ return sum(dd.get(weight, 1) for dd in nbrs.values()) + (
526
+ n in nbrs and nbrs[n].get(weight, 1)
527
+ )
528
+
529
+ def __iter__(self):
530
+ weight = self._weight
531
+ if weight is None:
532
+ for n in self._nodes:
533
+ nbrs = self._succ[n]
534
+ yield (n, len(nbrs) + (n in nbrs))
535
+ else:
536
+ for n in self._nodes:
537
+ nbrs = self._succ[n]
538
+ deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + (
539
+ n in nbrs and nbrs[n].get(weight, 1)
540
+ )
541
+ yield (n, deg)
542
+
543
+
544
+ class OutDegreeView(DiDegreeView):
545
+ """A DegreeView class to report out_degree for a DiGraph; See DegreeView"""
546
+
547
+ def __getitem__(self, n):
548
+ weight = self._weight
549
+ nbrs = self._succ[n]
550
+ if self._weight is None:
551
+ return len(nbrs)
552
+ return sum(dd.get(self._weight, 1) for dd in nbrs.values())
553
+
554
+ def __iter__(self):
555
+ weight = self._weight
556
+ if weight is None:
557
+ for n in self._nodes:
558
+ succs = self._succ[n]
559
+ yield (n, len(succs))
560
+ else:
561
+ for n in self._nodes:
562
+ succs = self._succ[n]
563
+ deg = sum(dd.get(weight, 1) for dd in succs.values())
564
+ yield (n, deg)
565
+
566
+
567
+ class InDegreeView(DiDegreeView):
568
+ """A DegreeView class to report in_degree for a DiGraph; See DegreeView"""
569
+
570
+ def __getitem__(self, n):
571
+ weight = self._weight
572
+ nbrs = self._pred[n]
573
+ if weight is None:
574
+ return len(nbrs)
575
+ return sum(dd.get(weight, 1) for dd in nbrs.values())
576
+
577
+ def __iter__(self):
578
+ weight = self._weight
579
+ if weight is None:
580
+ for n in self._nodes:
581
+ preds = self._pred[n]
582
+ yield (n, len(preds))
583
+ else:
584
+ for n in self._nodes:
585
+ preds = self._pred[n]
586
+ deg = sum(dd.get(weight, 1) for dd in preds.values())
587
+ yield (n, deg)
588
+
589
+
590
+ class MultiDegreeView(DiDegreeView):
591
+ """A DegreeView class for undirected multigraphs; See DegreeView"""
592
+
593
+ def __getitem__(self, n):
594
+ weight = self._weight
595
+ nbrs = self._succ[n]
596
+ if weight is None:
597
+ return sum(len(keys) for keys in nbrs.values()) + (
598
+ n in nbrs and len(nbrs[n])
599
+ )
600
+ # edge weighted graph - degree is sum of nbr edge weights
601
+ deg = sum(
602
+ d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values()
603
+ )
604
+ if n in nbrs:
605
+ deg += sum(d.get(weight, 1) for d in nbrs[n].values())
606
+ return deg
607
+
608
+ def __iter__(self):
609
+ weight = self._weight
610
+ if weight is None:
611
+ for n in self._nodes:
612
+ nbrs = self._succ[n]
613
+ deg = sum(len(keys) for keys in nbrs.values()) + (
614
+ n in nbrs and len(nbrs[n])
615
+ )
616
+ yield (n, deg)
617
+ else:
618
+ for n in self._nodes:
619
+ nbrs = self._succ[n]
620
+ deg = sum(
621
+ d.get(weight, 1)
622
+ for key_dict in nbrs.values()
623
+ for d in key_dict.values()
624
+ )
625
+ if n in nbrs:
626
+ deg += sum(d.get(weight, 1) for d in nbrs[n].values())
627
+ yield (n, deg)
628
+
629
+
630
+ class DiMultiDegreeView(DiDegreeView):
631
+ """A DegreeView class for MultiDiGraph; See DegreeView"""
632
+
633
+ def __getitem__(self, n):
634
+ weight = self._weight
635
+ succs = self._succ[n]
636
+ preds = self._pred[n]
637
+ if weight is None:
638
+ return sum(len(keys) for keys in succs.values()) + sum(
639
+ len(keys) for keys in preds.values()
640
+ )
641
+ # edge weighted graph - degree is sum of nbr edge weights
642
+ deg = sum(
643
+ d.get(weight, 1) for key_dict in succs.values() for d in key_dict.values()
644
+ ) + sum(
645
+ d.get(weight, 1) for key_dict in preds.values() for d in key_dict.values()
646
+ )
647
+ return deg
648
+
649
+ def __iter__(self):
650
+ weight = self._weight
651
+ if weight is None:
652
+ for n in self._nodes:
653
+ succs = self._succ[n]
654
+ preds = self._pred[n]
655
+ deg = sum(len(keys) for keys in succs.values()) + sum(
656
+ len(keys) for keys in preds.values()
657
+ )
658
+ yield (n, deg)
659
+ else:
660
+ for n in self._nodes:
661
+ succs = self._succ[n]
662
+ preds = self._pred[n]
663
+ deg = sum(
664
+ d.get(weight, 1)
665
+ for key_dict in succs.values()
666
+ for d in key_dict.values()
667
+ ) + sum(
668
+ d.get(weight, 1)
669
+ for key_dict in preds.values()
670
+ for d in key_dict.values()
671
+ )
672
+ yield (n, deg)
673
+
674
+
675
+ class InMultiDegreeView(DiDegreeView):
676
+ """A DegreeView class for inward degree of MultiDiGraph; See DegreeView"""
677
+
678
+ def __getitem__(self, n):
679
+ weight = self._weight
680
+ nbrs = self._pred[n]
681
+ if weight is None:
682
+ return sum(len(data) for data in nbrs.values())
683
+ # edge weighted graph - degree is sum of nbr edge weights
684
+ return sum(
685
+ d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values()
686
+ )
687
+
688
+ def __iter__(self):
689
+ weight = self._weight
690
+ if weight is None:
691
+ for n in self._nodes:
692
+ nbrs = self._pred[n]
693
+ deg = sum(len(data) for data in nbrs.values())
694
+ yield (n, deg)
695
+ else:
696
+ for n in self._nodes:
697
+ nbrs = self._pred[n]
698
+ deg = sum(
699
+ d.get(weight, 1)
700
+ for key_dict in nbrs.values()
701
+ for d in key_dict.values()
702
+ )
703
+ yield (n, deg)
704
+
705
+
706
+ class OutMultiDegreeView(DiDegreeView):
707
+ """A DegreeView class for outward degree of MultiDiGraph; See DegreeView"""
708
+
709
+ def __getitem__(self, n):
710
+ weight = self._weight
711
+ nbrs = self._succ[n]
712
+ if weight is None:
713
+ return sum(len(data) for data in nbrs.values())
714
+ # edge weighted graph - degree is sum of nbr edge weights
715
+ return sum(
716
+ d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values()
717
+ )
718
+
719
+ def __iter__(self):
720
+ weight = self._weight
721
+ if weight is None:
722
+ for n in self._nodes:
723
+ nbrs = self._succ[n]
724
+ deg = sum(len(data) for data in nbrs.values())
725
+ yield (n, deg)
726
+ else:
727
+ for n in self._nodes:
728
+ nbrs = self._succ[n]
729
+ deg = sum(
730
+ d.get(weight, 1)
731
+ for key_dict in nbrs.values()
732
+ for d in key_dict.values()
733
+ )
734
+ yield (n, deg)
735
+
736
+
737
+ # EdgeDataViews
738
+ class OutEdgeDataView:
739
+ """EdgeDataView for outward edges of DiGraph; See EdgeDataView"""
740
+
741
+ __slots__ = (
742
+ "_viewer",
743
+ "_nbunch",
744
+ "_data",
745
+ "_default",
746
+ "_adjdict",
747
+ "_nodes_nbrs",
748
+ "_report",
749
+ )
750
+
751
+ def __getstate__(self):
752
+ return {
753
+ "viewer": self._viewer,
754
+ "nbunch": self._nbunch,
755
+ "data": self._data,
756
+ "default": self._default,
757
+ }
758
+
759
+ def __setstate__(self, state):
760
+ self.__init__(**state)
761
+
762
+ def __init__(self, viewer, nbunch=None, data=False, *, default=None):
763
+ self._viewer = viewer
764
+ adjdict = self._adjdict = viewer._adjdict
765
+ if nbunch is None:
766
+ self._nodes_nbrs = adjdict.items
767
+ else:
768
+ # dict retains order of nodes but acts like a set
769
+ nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch))
770
+ self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch]
771
+ self._nbunch = nbunch
772
+ self._data = data
773
+ self._default = default
774
+ # Set _report based on data and default
775
+ if data is True:
776
+ self._report = lambda n, nbr, dd: (n, nbr, dd)
777
+ elif data is False:
778
+ self._report = lambda n, nbr, dd: (n, nbr)
779
+ else: # data is attribute name
780
+ self._report = (
781
+ lambda n, nbr, dd: (n, nbr, dd[data])
782
+ if data in dd
783
+ else (n, nbr, default)
784
+ )
785
+
786
+ def __len__(self):
787
+ return sum(len(nbrs) for n, nbrs in self._nodes_nbrs())
788
+
789
+ def __iter__(self):
790
+ return (
791
+ self._report(n, nbr, dd)
792
+ for n, nbrs in self._nodes_nbrs()
793
+ for nbr, dd in nbrs.items()
794
+ )
795
+
796
+ def __contains__(self, e):
797
+ u, v = e[:2]
798
+ if self._nbunch is not None and u not in self._nbunch:
799
+ return False # this edge doesn't start in nbunch
800
+ try:
801
+ ddict = self._adjdict[u][v]
802
+ except KeyError:
803
+ return False
804
+ return e == self._report(u, v, ddict)
805
+
806
+ def __str__(self):
807
+ return str(list(self))
808
+
809
+ def __repr__(self):
810
+ return f"{self.__class__.__name__}({list(self)})"
811
+
812
+
813
+ class EdgeDataView(OutEdgeDataView):
814
+ """A EdgeDataView class for edges of Graph
815
+
816
+ This view is primarily used to iterate over the edges reporting
817
+ edges as node-tuples with edge data optionally reported. The
818
+ argument `nbunch` allows restriction to edges incident to nodes
819
+ in that container/singleton. The default (nbunch=None)
820
+ reports all edges. The arguments `data` and `default` control
821
+ what edge data is reported. The default `data is False` reports
822
+ only node-tuples for each edge. If `data is True` the entire edge
823
+ data dict is returned. Otherwise `data` is assumed to hold the name
824
+ of the edge attribute to report with default `default` if that
825
+ edge attribute is not present.
826
+
827
+ Parameters
828
+ ----------
829
+ nbunch : container of nodes, node or None (default None)
830
+ data : False, True or string (default False)
831
+ default : default value (default None)
832
+
833
+ Examples
834
+ --------
835
+ >>> G = nx.path_graph(3)
836
+ >>> G.add_edge(1, 2, foo="bar")
837
+ >>> list(G.edges(data="foo", default="biz"))
838
+ [(0, 1, 'biz'), (1, 2, 'bar')]
839
+ >>> assert (0, 1, "biz") in G.edges(data="foo", default="biz")
840
+ """
841
+
842
+ __slots__ = ()
843
+
844
+ def __len__(self):
845
+ return sum(1 for e in self)
846
+
847
+ def __iter__(self):
848
+ seen = {}
849
+ for n, nbrs in self._nodes_nbrs():
850
+ for nbr, dd in nbrs.items():
851
+ if nbr not in seen:
852
+ yield self._report(n, nbr, dd)
853
+ seen[n] = 1
854
+ del seen
855
+
856
+ def __contains__(self, e):
857
+ u, v = e[:2]
858
+ if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch:
859
+ return False # this edge doesn't start and it doesn't end in nbunch
860
+ try:
861
+ ddict = self._adjdict[u][v]
862
+ except KeyError:
863
+ return False
864
+ return e == self._report(u, v, ddict)
865
+
866
+
867
+ class InEdgeDataView(OutEdgeDataView):
868
+ """An EdgeDataView class for outward edges of DiGraph; See EdgeDataView"""
869
+
870
+ __slots__ = ()
871
+
872
+ def __iter__(self):
873
+ return (
874
+ self._report(nbr, n, dd)
875
+ for n, nbrs in self._nodes_nbrs()
876
+ for nbr, dd in nbrs.items()
877
+ )
878
+
879
+ def __contains__(self, e):
880
+ u, v = e[:2]
881
+ if self._nbunch is not None and v not in self._nbunch:
882
+ return False # this edge doesn't end in nbunch
883
+ try:
884
+ ddict = self._adjdict[v][u]
885
+ except KeyError:
886
+ return False
887
+ return e == self._report(u, v, ddict)
888
+
889
+
890
+ class OutMultiEdgeDataView(OutEdgeDataView):
891
+ """An EdgeDataView for outward edges of MultiDiGraph; See EdgeDataView"""
892
+
893
+ __slots__ = ("keys",)
894
+
895
+ def __getstate__(self):
896
+ return {
897
+ "viewer": self._viewer,
898
+ "nbunch": self._nbunch,
899
+ "keys": self.keys,
900
+ "data": self._data,
901
+ "default": self._default,
902
+ }
903
+
904
+ def __setstate__(self, state):
905
+ self.__init__(**state)
906
+
907
+ def __init__(self, viewer, nbunch=None, data=False, *, default=None, keys=False):
908
+ self._viewer = viewer
909
+ adjdict = self._adjdict = viewer._adjdict
910
+ self.keys = keys
911
+ if nbunch is None:
912
+ self._nodes_nbrs = adjdict.items
913
+ else:
914
+ # dict retains order of nodes but acts like a set
915
+ nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch))
916
+ self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch]
917
+ self._nbunch = nbunch
918
+ self._data = data
919
+ self._default = default
920
+ # Set _report based on data and default
921
+ if data is True:
922
+ if keys is True:
923
+ self._report = lambda n, nbr, k, dd: (n, nbr, k, dd)
924
+ else:
925
+ self._report = lambda n, nbr, k, dd: (n, nbr, dd)
926
+ elif data is False:
927
+ if keys is True:
928
+ self._report = lambda n, nbr, k, dd: (n, nbr, k)
929
+ else:
930
+ self._report = lambda n, nbr, k, dd: (n, nbr)
931
+ else: # data is attribute name
932
+ if keys is True:
933
+ self._report = (
934
+ lambda n, nbr, k, dd: (n, nbr, k, dd[data])
935
+ if data in dd
936
+ else (n, nbr, k, default)
937
+ )
938
+ else:
939
+ self._report = (
940
+ lambda n, nbr, k, dd: (n, nbr, dd[data])
941
+ if data in dd
942
+ else (n, nbr, default)
943
+ )
944
+
945
+ def __len__(self):
946
+ return sum(1 for e in self)
947
+
948
+ def __iter__(self):
949
+ return (
950
+ self._report(n, nbr, k, dd)
951
+ for n, nbrs in self._nodes_nbrs()
952
+ for nbr, kd in nbrs.items()
953
+ for k, dd in kd.items()
954
+ )
955
+
956
+ def __contains__(self, e):
957
+ u, v = e[:2]
958
+ if self._nbunch is not None and u not in self._nbunch:
959
+ return False # this edge doesn't start in nbunch
960
+ try:
961
+ kdict = self._adjdict[u][v]
962
+ except KeyError:
963
+ return False
964
+ if self.keys is True:
965
+ k = e[2]
966
+ try:
967
+ dd = kdict[k]
968
+ except KeyError:
969
+ return False
970
+ return e == self._report(u, v, k, dd)
971
+ return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
972
+
973
+
974
+ class MultiEdgeDataView(OutMultiEdgeDataView):
975
+ """An EdgeDataView class for edges of MultiGraph; See EdgeDataView"""
976
+
977
+ __slots__ = ()
978
+
979
+ def __iter__(self):
980
+ seen = {}
981
+ for n, nbrs in self._nodes_nbrs():
982
+ for nbr, kd in nbrs.items():
983
+ if nbr not in seen:
984
+ for k, dd in kd.items():
985
+ yield self._report(n, nbr, k, dd)
986
+ seen[n] = 1
987
+ del seen
988
+
989
+ def __contains__(self, e):
990
+ u, v = e[:2]
991
+ if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch:
992
+ return False # this edge doesn't start and doesn't end in nbunch
993
+ try:
994
+ kdict = self._adjdict[u][v]
995
+ except KeyError:
996
+ try:
997
+ kdict = self._adjdict[v][u]
998
+ except KeyError:
999
+ return False
1000
+ if self.keys is True:
1001
+ k = e[2]
1002
+ try:
1003
+ dd = kdict[k]
1004
+ except KeyError:
1005
+ return False
1006
+ return e == self._report(u, v, k, dd)
1007
+ return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
1008
+
1009
+
1010
+ class InMultiEdgeDataView(OutMultiEdgeDataView):
1011
+ """An EdgeDataView for inward edges of MultiDiGraph; See EdgeDataView"""
1012
+
1013
+ __slots__ = ()
1014
+
1015
+ def __iter__(self):
1016
+ return (
1017
+ self._report(nbr, n, k, dd)
1018
+ for n, nbrs in self._nodes_nbrs()
1019
+ for nbr, kd in nbrs.items()
1020
+ for k, dd in kd.items()
1021
+ )
1022
+
1023
+ def __contains__(self, e):
1024
+ u, v = e[:2]
1025
+ if self._nbunch is not None and v not in self._nbunch:
1026
+ return False # this edge doesn't end in nbunch
1027
+ try:
1028
+ kdict = self._adjdict[v][u]
1029
+ except KeyError:
1030
+ return False
1031
+ if self.keys is True:
1032
+ k = e[2]
1033
+ dd = kdict[k]
1034
+ return e == self._report(u, v, k, dd)
1035
+ return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
1036
+
1037
+
1038
+ # EdgeViews have set operations and no data reported
1039
+ class OutEdgeView(Set, Mapping):
1040
+ """A EdgeView class for outward edges of a DiGraph"""
1041
+
1042
+ __slots__ = ("_adjdict", "_graph", "_nodes_nbrs")
1043
+
1044
+ def __getstate__(self):
1045
+ return {"_graph": self._graph, "_adjdict": self._adjdict}
1046
+
1047
+ def __setstate__(self, state):
1048
+ self._graph = state["_graph"]
1049
+ self._adjdict = state["_adjdict"]
1050
+ self._nodes_nbrs = self._adjdict.items
1051
+
1052
+ @classmethod
1053
+ def _from_iterable(cls, it):
1054
+ return set(it)
1055
+
1056
+ dataview = OutEdgeDataView
1057
+
1058
+ def __init__(self, G):
1059
+ self._graph = G
1060
+ self._adjdict = G._succ if hasattr(G, "succ") else G._adj
1061
+ self._nodes_nbrs = self._adjdict.items
1062
+
1063
+ # Set methods
1064
+ def __len__(self):
1065
+ return sum(len(nbrs) for n, nbrs in self._nodes_nbrs())
1066
+
1067
+ def __iter__(self):
1068
+ for n, nbrs in self._nodes_nbrs():
1069
+ for nbr in nbrs:
1070
+ yield (n, nbr)
1071
+
1072
+ def __contains__(self, e):
1073
+ try:
1074
+ u, v = e
1075
+ return v in self._adjdict[u]
1076
+ except KeyError:
1077
+ return False
1078
+
1079
+ # Mapping Methods
1080
+ def __getitem__(self, e):
1081
+ if isinstance(e, slice):
1082
+ raise nx.NetworkXError(
1083
+ f"{type(self).__name__} does not support slicing, "
1084
+ f"try list(G.edges)[{e.start}:{e.stop}:{e.step}]"
1085
+ )
1086
+ u, v = e
1087
+ try:
1088
+ return self._adjdict[u][v]
1089
+ except KeyError as ex: # Customize msg to indicate exception origin
1090
+ raise KeyError(f"The edge {e} is not in the graph.")
1091
+
1092
+ # EdgeDataView methods
1093
+ def __call__(self, nbunch=None, data=False, *, default=None):
1094
+ if nbunch is None and data is False:
1095
+ return self
1096
+ return self.dataview(self, nbunch, data, default=default)
1097
+
1098
+ def data(self, data=True, default=None, nbunch=None):
1099
+ """
1100
+ Return a read-only view of edge data.
1101
+
1102
+ Parameters
1103
+ ----------
1104
+ data : bool or edge attribute key
1105
+ If ``data=True``, then the data view maps each edge to a dictionary
1106
+ containing all of its attributes. If `data` is a key in the edge
1107
+ dictionary, then the data view maps each edge to its value for
1108
+ the keyed attribute. In this case, if the edge doesn't have the
1109
+ attribute, the `default` value is returned.
1110
+ default : object, default=None
1111
+ The value used when an edge does not have a specific attribute
1112
+ nbunch : container of nodes, optional (default=None)
1113
+ Allows restriction to edges only involving certain nodes. All edges
1114
+ are considered by default.
1115
+
1116
+ Returns
1117
+ -------
1118
+ dataview
1119
+ Returns an `EdgeDataView` for undirected Graphs, `OutEdgeDataView`
1120
+ for DiGraphs, `MultiEdgeDataView` for MultiGraphs and
1121
+ `OutMultiEdgeDataView` for MultiDiGraphs.
1122
+
1123
+ Notes
1124
+ -----
1125
+ If ``data=False``, returns an `EdgeView` without any edge data.
1126
+
1127
+ See Also
1128
+ --------
1129
+ EdgeDataView
1130
+ OutEdgeDataView
1131
+ MultiEdgeDataView
1132
+ OutMultiEdgeDataView
1133
+
1134
+ Examples
1135
+ --------
1136
+ >>> G = nx.Graph()
1137
+ >>> G.add_edges_from(
1138
+ ... [
1139
+ ... (0, 1, {"dist": 3, "capacity": 20}),
1140
+ ... (1, 2, {"dist": 4}),
1141
+ ... (2, 0, {"dist": 5}),
1142
+ ... ]
1143
+ ... )
1144
+
1145
+ Accessing edge data with ``data=True`` (the default) returns an
1146
+ edge data view object listing each edge with all of its attributes:
1147
+
1148
+ >>> G.edges.data()
1149
+ EdgeDataView([(0, 1, {'dist': 3, 'capacity': 20}), (0, 2, {'dist': 5}), (1, 2, {'dist': 4})])
1150
+
1151
+ If `data` represents a key in the edge attribute dict, a dataview listing
1152
+ each edge with its value for that specific key is returned:
1153
+
1154
+ >>> G.edges.data("dist")
1155
+ EdgeDataView([(0, 1, 3), (0, 2, 5), (1, 2, 4)])
1156
+
1157
+ `nbunch` can be used to limit the edges:
1158
+
1159
+ >>> G.edges.data("dist", nbunch=[0])
1160
+ EdgeDataView([(0, 1, 3), (0, 2, 5)])
1161
+
1162
+ If a specific key is not found in an edge attribute dict, the value
1163
+ specified by `default` is used:
1164
+
1165
+ >>> G.edges.data("capacity")
1166
+ EdgeDataView([(0, 1, 20), (0, 2, None), (1, 2, None)])
1167
+
1168
+ Note that there is no check that the `data` key is present in any of
1169
+ the edge attribute dictionaries:
1170
+
1171
+ >>> G.edges.data("speed")
1172
+ EdgeDataView([(0, 1, None), (0, 2, None), (1, 2, None)])
1173
+ """
1174
+ if nbunch is None and data is False:
1175
+ return self
1176
+ return self.dataview(self, nbunch, data, default=default)
1177
+
1178
+ # String Methods
1179
+ def __str__(self):
1180
+ return str(list(self))
1181
+
1182
+ def __repr__(self):
1183
+ return f"{self.__class__.__name__}({list(self)})"
1184
+
1185
+
1186
+ class EdgeView(OutEdgeView):
1187
+ """A EdgeView class for edges of a Graph
1188
+
1189
+ This densely packed View allows iteration over edges, data lookup
1190
+ like a dict and set operations on edges represented by node-tuples.
1191
+ In addition, edge data can be controlled by calling this object
1192
+ possibly creating an EdgeDataView. Typically edges are iterated over
1193
+ and reported as `(u, v)` node tuples or `(u, v, key)` node/key tuples
1194
+ for multigraphs. Those edge representations can also be used to
1195
+ look up the data dict for any edge. Set operations are also available
1196
+ where those tuples are the elements of the set.
1197
+ Calling this object with optional arguments `data`, `default` and `keys`
1198
+ controls the form of the tuple (see EdgeDataView). Optional argument
1199
+ `nbunch` allows restriction to edges only involving certain nodes.
1200
+
1201
+ If `data is False` (the default) then iterate over 2-tuples `(u, v)`.
1202
+ If `data is True` iterate over 3-tuples `(u, v, datadict)`.
1203
+ Otherwise iterate over `(u, v, datadict.get(data, default))`.
1204
+ For Multigraphs, if `keys is True`, replace `u, v` with `u, v, key` above.
1205
+
1206
+ Parameters
1207
+ ==========
1208
+ graph : NetworkX graph-like class
1209
+ nbunch : (default= all nodes in graph) only report edges with these nodes
1210
+ keys : (only for MultiGraph. default=False) report edge key in tuple
1211
+ data : bool or string (default=False) see above
1212
+ default : object (default=None)
1213
+
1214
+ Examples
1215
+ ========
1216
+ >>> G = nx.path_graph(4)
1217
+ >>> EV = G.edges()
1218
+ >>> (2, 3) in EV
1219
+ True
1220
+ >>> for u, v in EV:
1221
+ ... print((u, v))
1222
+ (0, 1)
1223
+ (1, 2)
1224
+ (2, 3)
1225
+ >>> assert EV & {(1, 2), (3, 4)} == {(1, 2)}
1226
+
1227
+ >>> EVdata = G.edges(data="color", default="aqua")
1228
+ >>> G.add_edge(2, 3, color="blue")
1229
+ >>> assert (2, 3, "blue") in EVdata
1230
+ >>> for u, v, c in EVdata:
1231
+ ... print(f"({u}, {v}) has color: {c}")
1232
+ (0, 1) has color: aqua
1233
+ (1, 2) has color: aqua
1234
+ (2, 3) has color: blue
1235
+
1236
+ >>> EVnbunch = G.edges(nbunch=2)
1237
+ >>> assert (2, 3) in EVnbunch
1238
+ >>> assert (0, 1) not in EVnbunch
1239
+ >>> for u, v in EVnbunch:
1240
+ ... assert u == 2 or v == 2
1241
+
1242
+ >>> MG = nx.path_graph(4, create_using=nx.MultiGraph)
1243
+ >>> EVmulti = MG.edges(keys=True)
1244
+ >>> (2, 3, 0) in EVmulti
1245
+ True
1246
+ >>> (2, 3) in EVmulti # 2-tuples work even when keys is True
1247
+ True
1248
+ >>> key = MG.add_edge(2, 3)
1249
+ >>> for u, v, k in EVmulti:
1250
+ ... print((u, v, k))
1251
+ (0, 1, 0)
1252
+ (1, 2, 0)
1253
+ (2, 3, 0)
1254
+ (2, 3, 1)
1255
+ """
1256
+
1257
+ __slots__ = ()
1258
+
1259
+ dataview = EdgeDataView
1260
+
1261
+ def __len__(self):
1262
+ num_nbrs = (len(nbrs) + (n in nbrs) for n, nbrs in self._nodes_nbrs())
1263
+ return sum(num_nbrs) // 2
1264
+
1265
+ def __iter__(self):
1266
+ seen = {}
1267
+ for n, nbrs in self._nodes_nbrs():
1268
+ for nbr in list(nbrs):
1269
+ if nbr not in seen:
1270
+ yield (n, nbr)
1271
+ seen[n] = 1
1272
+ del seen
1273
+
1274
+ def __contains__(self, e):
1275
+ try:
1276
+ u, v = e[:2]
1277
+ return v in self._adjdict[u] or u in self._adjdict[v]
1278
+ except (KeyError, ValueError):
1279
+ return False
1280
+
1281
+
1282
+ class InEdgeView(OutEdgeView):
1283
+ """A EdgeView class for inward edges of a DiGraph"""
1284
+
1285
+ __slots__ = ()
1286
+
1287
+ def __setstate__(self, state):
1288
+ self._graph = state["_graph"]
1289
+ self._adjdict = state["_adjdict"]
1290
+ self._nodes_nbrs = self._adjdict.items
1291
+
1292
+ dataview = InEdgeDataView
1293
+
1294
+ def __init__(self, G):
1295
+ self._graph = G
1296
+ self._adjdict = G._pred if hasattr(G, "pred") else G._adj
1297
+ self._nodes_nbrs = self._adjdict.items
1298
+
1299
+ def __iter__(self):
1300
+ for n, nbrs in self._nodes_nbrs():
1301
+ for nbr in nbrs:
1302
+ yield (nbr, n)
1303
+
1304
+ def __contains__(self, e):
1305
+ try:
1306
+ u, v = e
1307
+ return u in self._adjdict[v]
1308
+ except KeyError:
1309
+ return False
1310
+
1311
+ def __getitem__(self, e):
1312
+ if isinstance(e, slice):
1313
+ raise nx.NetworkXError(
1314
+ f"{type(self).__name__} does not support slicing, "
1315
+ f"try list(G.in_edges)[{e.start}:{e.stop}:{e.step}]"
1316
+ )
1317
+ u, v = e
1318
+ return self._adjdict[v][u]
1319
+
1320
+
1321
+ class OutMultiEdgeView(OutEdgeView):
1322
+ """A EdgeView class for outward edges of a MultiDiGraph"""
1323
+
1324
+ __slots__ = ()
1325
+
1326
+ dataview = OutMultiEdgeDataView
1327
+
1328
+ def __len__(self):
1329
+ return sum(
1330
+ len(kdict) for n, nbrs in self._nodes_nbrs() for nbr, kdict in nbrs.items()
1331
+ )
1332
+
1333
+ def __iter__(self):
1334
+ for n, nbrs in self._nodes_nbrs():
1335
+ for nbr, kdict in nbrs.items():
1336
+ for key in kdict:
1337
+ yield (n, nbr, key)
1338
+
1339
+ def __contains__(self, e):
1340
+ N = len(e)
1341
+ if N == 3:
1342
+ u, v, k = e
1343
+ elif N == 2:
1344
+ u, v = e
1345
+ k = 0
1346
+ else:
1347
+ raise ValueError("MultiEdge must have length 2 or 3")
1348
+ try:
1349
+ return k in self._adjdict[u][v]
1350
+ except KeyError:
1351
+ return False
1352
+
1353
+ def __getitem__(self, e):
1354
+ if isinstance(e, slice):
1355
+ raise nx.NetworkXError(
1356
+ f"{type(self).__name__} does not support slicing, "
1357
+ f"try list(G.edges)[{e.start}:{e.stop}:{e.step}]"
1358
+ )
1359
+ u, v, k = e
1360
+ return self._adjdict[u][v][k]
1361
+
1362
+ def __call__(self, nbunch=None, data=False, *, default=None, keys=False):
1363
+ if nbunch is None and data is False and keys is True:
1364
+ return self
1365
+ return self.dataview(self, nbunch, data, default=default, keys=keys)
1366
+
1367
+ def data(self, data=True, default=None, nbunch=None, keys=False):
1368
+ if nbunch is None and data is False and keys is True:
1369
+ return self
1370
+ return self.dataview(self, nbunch, data, default=default, keys=keys)
1371
+
1372
+
1373
+ class MultiEdgeView(OutMultiEdgeView):
1374
+ """A EdgeView class for edges of a MultiGraph"""
1375
+
1376
+ __slots__ = ()
1377
+
1378
+ dataview = MultiEdgeDataView
1379
+
1380
+ def __len__(self):
1381
+ return sum(1 for e in self)
1382
+
1383
+ def __iter__(self):
1384
+ seen = {}
1385
+ for n, nbrs in self._nodes_nbrs():
1386
+ for nbr, kd in nbrs.items():
1387
+ if nbr not in seen:
1388
+ for k, dd in kd.items():
1389
+ yield (n, nbr, k)
1390
+ seen[n] = 1
1391
+ del seen
1392
+
1393
+
1394
+ class InMultiEdgeView(OutMultiEdgeView):
1395
+ """A EdgeView class for inward edges of a MultiDiGraph"""
1396
+
1397
+ __slots__ = ()
1398
+
1399
+ def __setstate__(self, state):
1400
+ self._graph = state["_graph"]
1401
+ self._adjdict = state["_adjdict"]
1402
+ self._nodes_nbrs = self._adjdict.items
1403
+
1404
+ dataview = InMultiEdgeDataView
1405
+
1406
+ def __init__(self, G):
1407
+ self._graph = G
1408
+ self._adjdict = G._pred if hasattr(G, "pred") else G._adj
1409
+ self._nodes_nbrs = self._adjdict.items
1410
+
1411
+ def __iter__(self):
1412
+ for n, nbrs in self._nodes_nbrs():
1413
+ for nbr, kdict in nbrs.items():
1414
+ for key in kdict:
1415
+ yield (nbr, n, key)
1416
+
1417
+ def __contains__(self, e):
1418
+ N = len(e)
1419
+ if N == 3:
1420
+ u, v, k = e
1421
+ elif N == 2:
1422
+ u, v = e
1423
+ k = 0
1424
+ else:
1425
+ raise ValueError("MultiEdge must have length 2 or 3")
1426
+ try:
1427
+ return k in self._adjdict[v][u]
1428
+ except KeyError:
1429
+ return False
1430
+
1431
+ def __getitem__(self, e):
1432
+ if isinstance(e, slice):
1433
+ raise nx.NetworkXError(
1434
+ f"{type(self).__name__} does not support slicing, "
1435
+ f"try list(G.in_edges)[{e.start}:{e.stop}:{e.step}]"
1436
+ )
1437
+ u, v, k = e
1438
+ return self._adjdict[v][u][k]
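The edge views defined above are normally reached through G.edges and G.in_edges rather than constructed directly. As a minimal sketch of the calling convention the docstrings above describe (the attribute names "dist" and "capacity" are illustrative only):

import networkx as nx

G = nx.Graph()
G.add_edges_from([(0, 1, {"dist": 3, "capacity": 20}), (1, 2, {"dist": 4})])

# EdgeView: membership tests, dict-style data lookup, and set operations
assert (0, 1) in G.edges
assert G.edges[0, 1]["dist"] == 3
assert G.edges & {(1, 2), (5, 6)} == {(1, 2)}

# Calling the view (or using .data()) yields an EdgeDataView over edge data
for u, v, d in G.edges.data("dist", default=0):
    print(u, v, d)  # prints 0 1 3 and 1 2 4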
venv/lib/python3.10/site-packages/networkx/classes/tests/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/networkx/classes/tests/dispatch_interface.py ADDED
@@ -0,0 +1,194 @@
1
+ # This file contains utilities for testing the dispatching feature
2
+
3
+ # A full test of all dispatchable algorithms is performed by
4
+ # modifying the pytest invocation and setting an environment variable
5
+ # NETWORKX_TEST_BACKEND=nx-loopback pytest
6
+ # This is comprehensive, but only tests the `test_override_dispatch`
7
+ # function in networkx.classes.backends.
8
+
9
+ # To test the `_dispatchable` function directly, several tests scattered throughout
10
+ # NetworkX have been augmented to test normal and dispatch mode.
11
+ # Searching for `dispatch_interface` should locate the specific tests.
12
+
13
+ import networkx as nx
14
+ from networkx import DiGraph, Graph, MultiDiGraph, MultiGraph, PlanarEmbedding
15
+ from networkx.classes.reportviews import NodeView
16
+
17
+
18
+ class LoopbackGraph(Graph):
19
+ __networkx_backend__ = "nx-loopback"
20
+
21
+
22
+ class LoopbackDiGraph(DiGraph):
23
+ __networkx_backend__ = "nx-loopback"
24
+
25
+
26
+ class LoopbackMultiGraph(MultiGraph):
27
+ __networkx_backend__ = "nx-loopback"
28
+
29
+
30
+ class LoopbackMultiDiGraph(MultiDiGraph):
31
+ __networkx_backend__ = "nx-loopback"
32
+
33
+
34
+ class LoopbackPlanarEmbedding(PlanarEmbedding):
35
+ __networkx_backend__ = "nx-loopback"
36
+
37
+
38
+ def convert(graph):
39
+ if isinstance(graph, PlanarEmbedding):
40
+ return LoopbackPlanarEmbedding(graph)
41
+ if isinstance(graph, MultiDiGraph):
42
+ return LoopbackMultiDiGraph(graph)
43
+ if isinstance(graph, MultiGraph):
44
+ return LoopbackMultiGraph(graph)
45
+ if isinstance(graph, DiGraph):
46
+ return LoopbackDiGraph(graph)
47
+ if isinstance(graph, Graph):
48
+ return LoopbackGraph(graph)
49
+ raise TypeError(f"Unsupported type of graph: {type(graph)}")
50
+
51
+
52
+ class LoopbackDispatcher:
53
+ def __getattr__(self, item):
54
+ try:
55
+ return nx.utils.backends._registered_algorithms[item].orig_func
56
+ except KeyError:
57
+ raise AttributeError(item) from None
58
+
59
+ @staticmethod
60
+ def convert_from_nx(
61
+ graph,
62
+ *,
63
+ edge_attrs=None,
64
+ node_attrs=None,
65
+ preserve_edge_attrs=None,
66
+ preserve_node_attrs=None,
67
+ preserve_graph_attrs=None,
68
+ name=None,
69
+ graph_name=None,
70
+ ):
71
+ if name in {
72
+ # Raise if input graph changes
73
+ "lexicographical_topological_sort",
74
+ "topological_generations",
75
+ "topological_sort",
76
+ # Sensitive tests (iteration order matters)
77
+ "dfs_labeled_edges",
78
+ }:
79
+ return graph
80
+ if isinstance(graph, NodeView):
81
+ # Convert to a Graph with only nodes (no edges)
82
+ new_graph = Graph()
83
+ new_graph.add_nodes_from(graph.items())
84
+ graph = new_graph
85
+ G = LoopbackGraph()
86
+ elif not isinstance(graph, Graph):
87
+ raise TypeError(
88
+ f"Bad type for graph argument {graph_name} in {name}: {type(graph)}"
89
+ )
90
+ elif graph.__class__ in {Graph, LoopbackGraph}:
91
+ G = LoopbackGraph()
92
+ elif graph.__class__ in {DiGraph, LoopbackDiGraph}:
93
+ G = LoopbackDiGraph()
94
+ elif graph.__class__ in {MultiGraph, LoopbackMultiGraph}:
95
+ G = LoopbackMultiGraph()
96
+ elif graph.__class__ in {MultiDiGraph, LoopbackMultiDiGraph}:
97
+ G = LoopbackMultiDiGraph()
98
+ elif graph.__class__ in {PlanarEmbedding, LoopbackPlanarEmbedding}:
99
+ G = LoopbackDiGraph() # or LoopbackPlanarEmbedding
100
+ else:
101
+ # It would be nice to be able to convert _AntiGraph to a regular Graph
102
+ # nx.algorithms.approximation.kcomponents._AntiGraph
103
+ # nx.algorithms.tree.branchings.MultiDiGraph_EdgeKey
104
+ # nx.classes.tests.test_multidigraph.MultiDiGraphSubClass
105
+ # nx.classes.tests.test_multigraph.MultiGraphSubClass
106
+ G = graph.__class__()
107
+
108
+ if preserve_graph_attrs:
109
+ G.graph.update(graph.graph)
110
+
111
+ if preserve_node_attrs:
112
+ G.add_nodes_from(graph.nodes(data=True))
113
+ elif node_attrs:
114
+ G.add_nodes_from(
115
+ (
116
+ node,
117
+ {
118
+ k: datadict.get(k, default)
119
+ for k, default in node_attrs.items()
120
+ if default is not None or k in datadict
121
+ },
122
+ )
123
+ for node, datadict in graph.nodes(data=True)
124
+ )
125
+ else:
126
+ G.add_nodes_from(graph)
127
+
128
+ if graph.is_multigraph():
129
+ if preserve_edge_attrs:
130
+ G.add_edges_from(
131
+ (u, v, key, datadict)
132
+ for u, nbrs in graph._adj.items()
133
+ for v, keydict in nbrs.items()
134
+ for key, datadict in keydict.items()
135
+ )
136
+ elif edge_attrs:
137
+ G.add_edges_from(
138
+ (
139
+ u,
140
+ v,
141
+ key,
142
+ {
143
+ k: datadict.get(k, default)
144
+ for k, default in edge_attrs.items()
145
+ if default is not None or k in datadict
146
+ },
147
+ )
148
+ for u, nbrs in graph._adj.items()
149
+ for v, keydict in nbrs.items()
150
+ for key, datadict in keydict.items()
151
+ )
152
+ else:
153
+ G.add_edges_from(
154
+ (u, v, key, {})
155
+ for u, nbrs in graph._adj.items()
156
+ for v, keydict in nbrs.items()
157
+ for key, datadict in keydict.items()
158
+ )
159
+ elif preserve_edge_attrs:
160
+ G.add_edges_from(graph.edges(data=True))
161
+ elif edge_attrs:
162
+ G.add_edges_from(
163
+ (
164
+ u,
165
+ v,
166
+ {
167
+ k: datadict.get(k, default)
168
+ for k, default in edge_attrs.items()
169
+ if default is not None or k in datadict
170
+ },
171
+ )
172
+ for u, v, datadict in graph.edges(data=True)
173
+ )
174
+ else:
175
+ G.add_edges_from(graph.edges)
176
+ return G
177
+
178
+ @staticmethod
179
+ def convert_to_nx(obj, *, name=None):
180
+ return obj
181
+
182
+ @staticmethod
183
+ def on_start_tests(items):
184
+ # Verify that items can be xfailed
185
+ for item in items:
186
+ assert hasattr(item, "add_marker")
187
+
188
+ def can_run(self, name, args, kwargs):
189
+ # It is unnecessary to define this function if algorithms are fully supported.
190
+ # We include it for illustration purposes.
191
+ return hasattr(self, name)
192
+
193
+
194
+ dispatcher = LoopbackDispatcher()
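Exercised by hand, the conversion path above simply re-wraps a graph in the matching loopback subclass. A small sketch using only the helpers defined in this file:

import networkx as nx
from networkx.classes.tests.dispatch_interface import LoopbackDiGraph, convert

G = nx.DiGraph([(0, 1), (1, 2)])
H = convert(G)  # picks LoopbackDiGraph because G is a DiGraph

assert isinstance(H, LoopbackDiGraph)
assert H.__networkx_backend__ == "nx-loopback"
assert sorted(H.edges()) == [(0, 1), (1, 2)]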
venv/lib/python3.10/site-packages/networkx/classes/tests/historical_tests.py ADDED
@@ -0,0 +1,474 @@
1
+ """Original NetworkX graph tests"""
2
+ import pytest
3
+
4
+ import networkx as nx
5
+ from networkx import convert_node_labels_to_integers as cnlti
6
+ from networkx.utils import edges_equal, nodes_equal
7
+
8
+
9
+ class HistoricalTests:
10
+ @classmethod
11
+ def setup_class(cls):
12
+ cls.null = nx.null_graph()
13
+ cls.P1 = cnlti(nx.path_graph(1), first_label=1)
14
+ cls.P3 = cnlti(nx.path_graph(3), first_label=1)
15
+ cls.P10 = cnlti(nx.path_graph(10), first_label=1)
16
+ cls.K1 = cnlti(nx.complete_graph(1), first_label=1)
17
+ cls.K3 = cnlti(nx.complete_graph(3), first_label=1)
18
+ cls.K4 = cnlti(nx.complete_graph(4), first_label=1)
19
+ cls.K5 = cnlti(nx.complete_graph(5), first_label=1)
20
+ cls.K10 = cnlti(nx.complete_graph(10), first_label=1)
21
+ cls.G = nx.Graph
22
+
23
+ def test_name(self):
24
+ G = self.G(name="test")
25
+ assert G.name == "test"
26
+ H = self.G()
27
+ assert H.name == ""
28
+
29
+ # Nodes
30
+
31
+ def test_add_remove_node(self):
32
+ G = self.G()
33
+ G.add_node("A")
34
+ assert G.has_node("A")
35
+ G.remove_node("A")
36
+ assert not G.has_node("A")
37
+
38
+ def test_nonhashable_node(self):
39
+ # Test if a non-hashable object is in the Graph. A python dict will
40
+ # raise a TypeError, but for a Graph class a simple False should be
41
+ # returned (see Graph __contains__). If it cannot be a node then it is
42
+ # not a node.
43
+ G = self.G()
44
+ assert not G.has_node(["A"])
45
+ assert not G.has_node({"A": 1})
46
+
47
+ def test_add_nodes_from(self):
48
+ G = self.G()
49
+ G.add_nodes_from(list("ABCDEFGHIJKL"))
50
+ assert G.has_node("L")
51
+ G.remove_nodes_from(["H", "I", "J", "K", "L"])
52
+ G.add_nodes_from([1, 2, 3, 4])
53
+ assert sorted(G.nodes(), key=str) == [
54
+ 1,
55
+ 2,
56
+ 3,
57
+ 4,
58
+ "A",
59
+ "B",
60
+ "C",
61
+ "D",
62
+ "E",
63
+ "F",
64
+ "G",
65
+ ]
66
+ # test __iter__
67
+ assert sorted(G, key=str) == [1, 2, 3, 4, "A", "B", "C", "D", "E", "F", "G"]
68
+
69
+ def test_contains(self):
70
+ G = self.G()
71
+ G.add_node("A")
72
+ assert "A" in G
73
+ assert [] not in G # never raise a Key or TypeError in this test
74
+ assert {1: 1} not in G
75
+
76
+ def test_add_remove(self):
77
+ # Test add_node and remove_node acting for various nbunch
78
+ G = self.G()
79
+ G.add_node("m")
80
+ assert G.has_node("m")
81
+ G.add_node("m") # no complaints
82
+ pytest.raises(nx.NetworkXError, G.remove_node, "j")
83
+ G.remove_node("m")
84
+ assert list(G) == []
85
+
86
+ def test_nbunch_is_list(self):
87
+ G = self.G()
88
+ G.add_nodes_from(list("ABCD"))
89
+ G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph)
90
+ assert sorted(G.nodes(), key=str) == [1, 2, 3, "A", "B", "C", "D"]
91
+ G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph)
92
+ assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D"]
93
+
94
+ def test_nbunch_is_set(self):
95
+ G = self.G()
96
+ nbunch = set("ABCDEFGHIJKL")
97
+ G.add_nodes_from(nbunch)
98
+ assert G.has_node("L")
99
+
100
+ def test_nbunch_dict(self):
101
+ # nbunch is a dict with nodes as keys
102
+ G = self.G()
103
+ nbunch = set("ABCDEFGHIJKL")
104
+ G.add_nodes_from(nbunch)
105
+ nbunch = {"I": "foo", "J": 2, "K": True, "L": "spam"}
106
+ G.remove_nodes_from(nbunch)
107
+ assert sorted(G.nodes(), key=str), ["A", "B", "C", "D", "E", "F", "G", "H"]
108
+
109
+ def test_nbunch_iterator(self):
110
+ G = self.G()
111
+ G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"])
112
+ n_iter = self.P3.nodes()
113
+ G.add_nodes_from(n_iter)
114
+ assert sorted(G.nodes(), key=str) == [
115
+ 1,
116
+ 2,
117
+ 3,
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D",
122
+ "E",
123
+ "F",
124
+ "G",
125
+ "H",
126
+ ]
127
+ n_iter = self.P3.nodes() # rebuild same iterator
128
+ G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator)
129
+ assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"]
130
+
131
+ def test_nbunch_graph(self):
132
+ G = self.G()
133
+ G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"])
134
+ nbunch = self.K3
135
+ G.add_nodes_from(nbunch)
136
+ assert sorted(G.nodes(), key=str) == [
137
+ 1,
138
+ 2,
139
+ 3,
140
+ "A",
141
+ "B",
142
+ "C",
143
+ "D",
144
+ "E",
145
+ "F",
146
+ "G",
147
+ "H",
148
+ ]
149
+
150
+ # Edges
151
+
152
+ def test_add_edge(self):
153
+ G = self.G()
154
+ pytest.raises(TypeError, G.add_edge, "A")
155
+
156
+ G.add_edge("A", "B") # testing add_edge()
157
+ G.add_edge("A", "B") # should fail silently
158
+ assert G.has_edge("A", "B")
159
+ assert not G.has_edge("A", "C")
160
+ assert G.has_edge(*("A", "B"))
161
+ if G.is_directed():
162
+ assert not G.has_edge("B", "A")
163
+ else:
164
+ # G is undirected, so B->A is an edge
165
+ assert G.has_edge("B", "A")
166
+
167
+ G.add_edge("A", "C") # test directedness
168
+ G.add_edge("C", "A")
169
+ G.remove_edge("C", "A")
170
+ if G.is_directed():
171
+ assert G.has_edge("A", "C")
172
+ else:
173
+ assert not G.has_edge("A", "C")
174
+ assert not G.has_edge("C", "A")
175
+
176
+ def test_self_loop(self):
177
+ G = self.G()
178
+ G.add_edge("A", "A") # test self loops
179
+ assert G.has_edge("A", "A")
180
+ G.remove_edge("A", "A")
181
+ G.add_edge("X", "X")
182
+ assert G.has_node("X")
183
+ G.remove_node("X")
184
+ G.add_edge("A", "Z") # should add the node silently
185
+ assert G.has_node("Z")
186
+
187
+ def test_add_edges_from(self):
188
+ G = self.G()
189
+ G.add_edges_from([("B", "C")]) # test add_edges_from()
190
+ assert G.has_edge("B", "C")
191
+ if G.is_directed():
192
+ assert not G.has_edge("C", "B")
193
+ else:
194
+ assert G.has_edge("C", "B") # undirected
195
+
196
+ G.add_edges_from([("D", "F"), ("B", "D")])
197
+ assert G.has_edge("D", "F")
198
+ assert G.has_edge("B", "D")
199
+
200
+ if G.is_directed():
201
+ assert not G.has_edge("D", "B")
202
+ else:
203
+ assert G.has_edge("D", "B") # undirected
204
+
205
+ def test_add_edges_from2(self):
206
+ G = self.G()
207
+ # after failing silently, should add 2nd edge
208
+ G.add_edges_from([tuple("IJ"), list("KK"), tuple("JK")])
209
+ assert G.has_edge(*("I", "J"))
210
+ assert G.has_edge(*("K", "K"))
211
+ assert G.has_edge(*("J", "K"))
212
+ if G.is_directed():
213
+ assert not G.has_edge(*("K", "J"))
214
+ else:
215
+ assert G.has_edge(*("K", "J"))
216
+
217
+ def test_add_edges_from3(self):
218
+ G = self.G()
219
+ G.add_edges_from(zip(list("ACD"), list("CDE")))
220
+ assert G.has_edge("D", "E")
221
+ assert not G.has_edge("E", "C")
222
+
223
+ def test_remove_edge(self):
224
+ G = self.G()
225
+ G.add_nodes_from([1, 2, 3, "A", "B", "C", "D", "E", "F", "G", "H"])
226
+
227
+ G.add_edges_from(zip(list("MNOP"), list("NOPM")))
228
+ assert G.has_edge("O", "P")
229
+ assert G.has_edge("P", "M")
230
+ G.remove_node("P") # tests remove_node()'s handling of edges.
231
+ assert not G.has_edge("P", "M")
232
+ pytest.raises(TypeError, G.remove_edge, "M")
233
+
234
+ G.add_edge("N", "M")
235
+ assert G.has_edge("M", "N")
236
+ G.remove_edge("M", "N")
237
+ assert not G.has_edge("M", "N")
238
+
239
+ # self loop fails silently
240
+ G.remove_edges_from([list("HI"), list("DF"), tuple("KK"), tuple("JK")])
241
+ assert not G.has_edge("H", "I")
242
+ assert not G.has_edge("J", "K")
243
+ G.remove_edges_from([list("IJ"), list("KK"), list("JK")])
244
+ assert not G.has_edge("I", "J")
245
+ G.remove_nodes_from(set("ZEFHIMNO"))
246
+ G.add_edge("J", "K")
247
+
248
+ def test_edges_nbunch(self):
249
+ # Test G.edges(nbunch) with various forms of nbunch
250
+ G = self.G()
251
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
252
+ # node not in nbunch should be quietly ignored
253
+ pytest.raises(nx.NetworkXError, G.edges, 6)
254
+ assert list(G.edges("Z")) == [] # iterable non-node
255
+ # nbunch can be an empty list
256
+ assert list(G.edges([])) == []
257
+ if G.is_directed():
258
+ elist = [("A", "B"), ("A", "C"), ("B", "D")]
259
+ else:
260
+ elist = [("A", "B"), ("A", "C"), ("B", "C"), ("B", "D")]
261
+ # nbunch can be a list
262
+ assert edges_equal(list(G.edges(["A", "B"])), elist)
263
+ # nbunch can be a set
264
+ assert edges_equal(G.edges({"A", "B"}), elist)
265
+ # nbunch can be a graph
266
+ G1 = self.G()
267
+ G1.add_nodes_from("AB")
268
+ assert edges_equal(G.edges(G1), elist)
269
+ # nbunch can be a dict with nodes as keys
270
+ ndict = {"A": "thing1", "B": "thing2"}
271
+ assert edges_equal(G.edges(ndict), elist)
272
+ # nbunch can be a single node
273
+ assert edges_equal(list(G.edges("A")), [("A", "B"), ("A", "C")])
274
+ assert nodes_equal(sorted(G), ["A", "B", "C", "D"])
275
+
276
+ # nbunch can be nothing (whole graph)
277
+ assert edges_equal(
278
+ list(G.edges()),
279
+ [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")],
280
+ )
281
+
282
+ def test_degree(self):
283
+ G = self.G()
284
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
285
+ assert G.degree("A") == 2
286
+
287
+ # degree of single node in iterable container must return dict
288
+ assert list(G.degree(["A"])) == [("A", 2)]
289
+ assert sorted(d for n, d in G.degree(["A", "B"])) == [2, 3]
290
+ assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3]
291
+
292
+ def test_degree2(self):
293
+ H = self.G()
294
+ H.add_edges_from([(1, 24), (1, 2)])
295
+ assert sorted(d for n, d in H.degree([1, 24])) == [1, 2]
296
+
297
+ def test_degree_graph(self):
298
+ P3 = nx.path_graph(3)
299
+ P5 = nx.path_graph(5)
300
+ # silently ignore nodes not in P3
301
+ assert dict(d for n, d in P3.degree(["A", "B"])) == {}
302
+ # nbunch can be a graph
303
+ assert sorted(d for n, d in P5.degree(P3)) == [1, 2, 2]
304
+ # nbunch can be a graph that's way too big
305
+ assert sorted(d for n, d in P3.degree(P5)) == [1, 1, 2]
306
+ assert list(P5.degree([])) == []
307
+ assert dict(P5.degree([])) == {}
308
+
309
+ def test_null(self):
310
+ null = nx.null_graph()
311
+ assert list(null.degree()) == []
312
+ assert dict(null.degree()) == {}
313
+
314
+ def test_order_size(self):
315
+ G = self.G()
316
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
317
+ assert G.order() == 4
318
+ assert G.size() == 5
319
+ assert G.number_of_edges() == 5
320
+ assert G.number_of_edges("A", "B") == 1
321
+ assert G.number_of_edges("A", "D") == 0
322
+
323
+ def test_copy(self):
324
+ G = self.G()
325
+ H = G.copy() # copy
326
+ assert H.adj == G.adj
327
+ assert H.name == G.name
328
+ assert H is not G
329
+
330
+ def test_subgraph(self):
331
+ G = self.G()
332
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
333
+ SG = G.subgraph(["A", "B", "D"])
334
+ assert nodes_equal(list(SG), ["A", "B", "D"])
335
+ assert edges_equal(list(SG.edges()), [("A", "B"), ("B", "D")])
336
+
337
+ def test_to_directed(self):
338
+ G = self.G()
339
+ if not G.is_directed():
340
+ G.add_edges_from(
341
+ [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]
342
+ )
343
+
344
+ DG = G.to_directed()
345
+ assert DG is not G # directed copy or copy
346
+
347
+ assert DG.is_directed()
348
+ assert DG.name == G.name
349
+ assert DG.adj == G.adj
350
+ assert sorted(DG.out_edges(list("AB"))) == [
351
+ ("A", "B"),
352
+ ("A", "C"),
353
+ ("B", "A"),
354
+ ("B", "C"),
355
+ ("B", "D"),
356
+ ]
357
+ DG.remove_edge("A", "B")
358
+ assert DG.has_edge("B", "A") # this removes B-A but not A-B
359
+ assert not DG.has_edge("A", "B")
360
+
361
+ def test_to_undirected(self):
362
+ G = self.G()
363
+ if G.is_directed():
364
+ G.add_edges_from(
365
+ [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]
366
+ )
367
+ UG = G.to_undirected() # to_undirected
368
+ assert UG is not G
369
+ assert not UG.is_directed()
370
+ assert G.is_directed()
371
+ assert UG.name == G.name
372
+ assert UG.adj != G.adj
373
+ assert sorted(UG.edges(list("AB"))) == [
374
+ ("A", "B"),
375
+ ("A", "C"),
376
+ ("B", "C"),
377
+ ("B", "D"),
378
+ ]
379
+ assert sorted(UG.edges(["A", "B"])) == [
380
+ ("A", "B"),
381
+ ("A", "C"),
382
+ ("B", "C"),
383
+ ("B", "D"),
384
+ ]
385
+ UG.remove_edge("A", "B")
386
+ assert not UG.has_edge("B", "A")
387
+ assert not UG.has_edge("A", "B")
388
+
389
+ def test_neighbors(self):
390
+ G = self.G()
391
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
392
+ G.add_nodes_from("GJK")
393
+ assert sorted(G["A"]) == ["B", "C"]
394
+ assert sorted(G.neighbors("A")) == ["B", "C"]
395
+ assert sorted(G.neighbors("A")) == ["B", "C"]
396
+ assert sorted(G.neighbors("G")) == []
397
+ pytest.raises(nx.NetworkXError, G.neighbors, "j")
398
+
399
+ def test_iterators(self):
400
+ G = self.G()
401
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")])
402
+ G.add_nodes_from("GJK")
403
+ assert sorted(G.nodes()) == ["A", "B", "C", "D", "G", "J", "K"]
404
+ assert edges_equal(
405
+ G.edges(), [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]
406
+ )
407
+
408
+ assert sorted(v for k, v in G.degree()) == [0, 0, 0, 2, 2, 3, 3]
409
+ assert sorted(G.degree(), key=str) == [
410
+ ("A", 2),
411
+ ("B", 3),
412
+ ("C", 3),
413
+ ("D", 2),
414
+ ("G", 0),
415
+ ("J", 0),
416
+ ("K", 0),
417
+ ]
418
+ assert sorted(G.neighbors("A")) == ["B", "C"]
419
+ pytest.raises(nx.NetworkXError, G.neighbors, "X")
420
+ G.clear()
421
+ assert nx.number_of_nodes(G) == 0
422
+ assert nx.number_of_edges(G) == 0
423
+
424
+ def test_null_subgraph(self):
425
+ # Subgraph of a null graph is a null graph
426
+ nullgraph = nx.null_graph()
427
+ G = nx.null_graph()
428
+ H = G.subgraph([])
429
+ assert nx.is_isomorphic(H, nullgraph)
430
+
431
+ def test_empty_subgraph(self):
432
+ # Subgraph of an empty graph is an empty graph. test 1
433
+ nullgraph = nx.null_graph()
434
+ E5 = nx.empty_graph(5)
435
+ E10 = nx.empty_graph(10)
436
+ H = E10.subgraph([])
437
+ assert nx.is_isomorphic(H, nullgraph)
438
+ H = E10.subgraph([1, 2, 3, 4, 5])
439
+ assert nx.is_isomorphic(H, E5)
440
+
441
+ def test_complete_subgraph(self):
442
+ # Subgraph of a complete graph is a complete graph
443
+ K1 = nx.complete_graph(1)
444
+ K3 = nx.complete_graph(3)
445
+ K5 = nx.complete_graph(5)
446
+ H = K5.subgraph([1, 2, 3])
447
+ assert nx.is_isomorphic(H, K3)
448
+
449
+ def test_subgraph_nbunch(self):
450
+ nullgraph = nx.null_graph()
451
+ K1 = nx.complete_graph(1)
452
+ K3 = nx.complete_graph(3)
453
+ K5 = nx.complete_graph(5)
454
+ # Test G.subgraph(nbunch), where nbunch is a single node
455
+ H = K5.subgraph(1)
456
+ assert nx.is_isomorphic(H, K1)
457
+ # Test G.subgraph(nbunch), where nbunch is a set
458
+ H = K5.subgraph({1})
459
+ assert nx.is_isomorphic(H, K1)
460
+ # Test G.subgraph(nbunch), where nbunch is an iterator
461
+ H = K5.subgraph(iter(K3))
462
+ assert nx.is_isomorphic(H, K3)
463
+ # Test G.subgraph(nbunch), where nbunch is another graph
464
+ H = K5.subgraph(K3)
465
+ assert nx.is_isomorphic(H, K3)
466
+ H = K5.subgraph([9])
467
+ assert nx.is_isomorphic(H, nullgraph)
468
+
469
+ def test_node_tuple_issue(self):
470
+ H = self.G()
471
+ # Test error handling of tuple as a node
472
+ pytest.raises(nx.NetworkXError, H.remove_node, (1, 2))
473
+ H.remove_nodes_from([(1, 2)]) # no error
474
+ pytest.raises(nx.NetworkXError, H.neighbors, (1, 2))
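HistoricalTests is written as a mixin: nothing in it runs until a concrete test class binds cls.G to a graph class in setup_class, which is exactly what the digraph variant in the next file does. A minimal sketch of the same pattern for an undirected graph (class name here is illustrative):

import networkx as nx
from networkx.classes.tests.historical_tests import HistoricalTests

class TestGraphHistorical(HistoricalTests):
    @classmethod
    def setup_class(cls):
        HistoricalTests.setup_class()
        cls.G = nx.Graph  # run the whole historical suite against nx.Graph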
venv/lib/python3.10/site-packages/networkx/classes/tests/test_digraph.py ADDED
@@ -0,0 +1,331 @@
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.utils import nodes_equal
5
+
6
+ from .test_graph import BaseAttrGraphTester, BaseGraphTester
7
+ from .test_graph import TestEdgeSubgraph as _TestGraphEdgeSubgraph
8
+ from .test_graph import TestGraph as _TestGraph
9
+
10
+
11
+ class BaseDiGraphTester(BaseGraphTester):
12
+ def test_has_successor(self):
13
+ G = self.K3
14
+ assert G.has_successor(0, 1)
15
+ assert not G.has_successor(0, -1)
16
+
17
+ def test_successors(self):
18
+ G = self.K3
19
+ assert sorted(G.successors(0)) == [1, 2]
20
+ with pytest.raises(nx.NetworkXError):
21
+ G.successors(-1)
22
+
23
+ def test_has_predecessor(self):
24
+ G = self.K3
25
+ assert G.has_predecessor(0, 1)
26
+ assert not G.has_predecessor(0, -1)
27
+
28
+ def test_predecessors(self):
29
+ G = self.K3
30
+ assert sorted(G.predecessors(0)) == [1, 2]
31
+ with pytest.raises(nx.NetworkXError):
32
+ G.predecessors(-1)
33
+
34
+ def test_edges(self):
35
+ G = self.K3
36
+ assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
37
+ assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
38
+ assert sorted(G.edges([0, 1])) == [(0, 1), (0, 2), (1, 0), (1, 2)]
39
+ with pytest.raises(nx.NetworkXError):
40
+ G.edges(-1)
41
+
42
+ def test_out_edges(self):
43
+ G = self.K3
44
+ assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
45
+ assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
46
+ with pytest.raises(nx.NetworkXError):
47
+ G.out_edges(-1)
48
+
49
+ def test_out_edges_dir(self):
50
+ G = self.P3
51
+ assert sorted(G.out_edges()) == [(0, 1), (1, 2)]
52
+ assert sorted(G.out_edges(0)) == [(0, 1)]
53
+ assert sorted(G.out_edges(2)) == []
54
+
55
+ def test_out_edges_data(self):
56
+ G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})])
57
+ assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})]
58
+ assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})]
59
+ assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)]
60
+ assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)]
61
+
62
+ def test_in_edges_dir(self):
63
+ G = self.P3
64
+ assert sorted(G.in_edges()) == [(0, 1), (1, 2)]
65
+ assert sorted(G.in_edges(0)) == []
66
+ assert sorted(G.in_edges(2)) == [(1, 2)]
67
+
68
+ def test_in_edges_data(self):
69
+ G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})])
70
+ assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})]
71
+ assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})]
72
+ assert sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)]
73
+ assert sorted(G.in_edges(1, data="data")) == [(0, 1, 0)]
74
+
75
+ def test_degree(self):
76
+ G = self.K3
77
+ assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)]
78
+ assert dict(G.degree()) == {0: 4, 1: 4, 2: 4}
79
+ assert G.degree(0) == 4
80
+ assert list(G.degree(iter([0]))) == [(0, 4)] # run through iterator
81
+
82
+ def test_in_degree(self):
83
+ G = self.K3
84
+ assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)]
85
+ assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2}
86
+ assert G.in_degree(0) == 2
87
+ assert list(G.in_degree(iter([0]))) == [(0, 2)] # run through iterator
88
+
89
+ def test_out_degree(self):
90
+ G = self.K3
91
+ assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)]
92
+ assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2}
93
+ assert G.out_degree(0) == 2
94
+ assert list(G.out_degree(iter([0]))) == [(0, 2)]
95
+
96
+ def test_size(self):
97
+ G = self.K3
98
+ assert G.size() == 6
99
+ assert G.number_of_edges() == 6
100
+
101
+ def test_to_undirected_reciprocal(self):
102
+ G = self.Graph()
103
+ G.add_edge(1, 2)
104
+ assert G.to_undirected().has_edge(1, 2)
105
+ assert not G.to_undirected(reciprocal=True).has_edge(1, 2)
106
+ G.add_edge(2, 1)
107
+ assert G.to_undirected(reciprocal=True).has_edge(1, 2)
108
+
109
+ def test_reverse_copy(self):
110
+ G = nx.DiGraph([(0, 1), (1, 2)])
111
+ R = G.reverse()
112
+ assert sorted(R.edges()) == [(1, 0), (2, 1)]
113
+ R.remove_edge(1, 0)
114
+ assert sorted(R.edges()) == [(2, 1)]
115
+ assert sorted(G.edges()) == [(0, 1), (1, 2)]
116
+
117
+ def test_reverse_nocopy(self):
118
+ G = nx.DiGraph([(0, 1), (1, 2)])
119
+ R = G.reverse(copy=False)
120
+ assert sorted(R.edges()) == [(1, 0), (2, 1)]
121
+ with pytest.raises(nx.NetworkXError):
122
+ R.remove_edge(1, 0)
123
+
124
+ def test_reverse_hashable(self):
125
+ class Foo:
126
+ pass
127
+
128
+ x = Foo()
129
+ y = Foo()
130
+ G = nx.DiGraph()
131
+ G.add_edge(x, y)
132
+ assert nodes_equal(G.nodes(), G.reverse().nodes())
133
+ assert [(y, x)] == list(G.reverse().edges())
134
+
135
+ def test_di_cache_reset(self):
136
+ G = self.K3.copy()
137
+ old_succ = G.succ
138
+ assert id(G.succ) == id(old_succ)
139
+ old_adj = G.adj
140
+ assert id(G.adj) == id(old_adj)
141
+
142
+ G._succ = {}
143
+ assert id(G.succ) != id(old_succ)
144
+ assert id(G.adj) != id(old_adj)
145
+
146
+ old_pred = G.pred
147
+ assert id(G.pred) == id(old_pred)
148
+ G._pred = {}
149
+ assert id(G.pred) != id(old_pred)
150
+
151
+ def test_di_attributes_cached(self):
152
+ G = self.K3.copy()
153
+ assert id(G.in_edges) == id(G.in_edges)
154
+ assert id(G.out_edges) == id(G.out_edges)
155
+ assert id(G.in_degree) == id(G.in_degree)
156
+ assert id(G.out_degree) == id(G.out_degree)
157
+ assert id(G.succ) == id(G.succ)
158
+ assert id(G.pred) == id(G.pred)
159
+
160
+
161
+ class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester):
162
+ def test_edges_data(self):
163
+ G = self.K3
164
+ all_edges = [
165
+ (0, 1, {}),
166
+ (0, 2, {}),
167
+ (1, 0, {}),
168
+ (1, 2, {}),
169
+ (2, 0, {}),
170
+ (2, 1, {}),
171
+ ]
172
+ assert sorted(G.edges(data=True)) == all_edges
173
+ assert sorted(G.edges(0, data=True)) == all_edges[:2]
174
+ assert sorted(G.edges([0, 1], data=True)) == all_edges[:4]
175
+ with pytest.raises(nx.NetworkXError):
176
+ G.edges(-1, True)
177
+
178
+ def test_in_degree_weighted(self):
179
+ G = self.K3.copy()
180
+ G.add_edge(0, 1, weight=0.3, other=1.2)
181
+ assert sorted(G.in_degree(weight="weight")) == [(0, 2), (1, 1.3), (2, 2)]
182
+ assert dict(G.in_degree(weight="weight")) == {0: 2, 1: 1.3, 2: 2}
183
+ assert G.in_degree(1, weight="weight") == 1.3
184
+ assert sorted(G.in_degree(weight="other")) == [(0, 2), (1, 2.2), (2, 2)]
185
+ assert dict(G.in_degree(weight="other")) == {0: 2, 1: 2.2, 2: 2}
186
+ assert G.in_degree(1, weight="other") == 2.2
187
+ assert list(G.in_degree(iter([1]), weight="other")) == [(1, 2.2)]
188
+
189
+ def test_out_degree_weighted(self):
190
+ G = self.K3.copy()
191
+ G.add_edge(0, 1, weight=0.3, other=1.2)
192
+ assert sorted(G.out_degree(weight="weight")) == [(0, 1.3), (1, 2), (2, 2)]
193
+ assert dict(G.out_degree(weight="weight")) == {0: 1.3, 1: 2, 2: 2}
194
+ assert G.out_degree(0, weight="weight") == 1.3
195
+ assert sorted(G.out_degree(weight="other")) == [(0, 2.2), (1, 2), (2, 2)]
196
+ assert dict(G.out_degree(weight="other")) == {0: 2.2, 1: 2, 2: 2}
197
+ assert G.out_degree(0, weight="other") == 2.2
198
+ assert list(G.out_degree(iter([0]), weight="other")) == [(0, 2.2)]
199
+
200
+
201
+ class TestDiGraph(BaseAttrDiGraphTester, _TestGraph):
202
+ """Tests specific to dict-of-dict-of-dict digraph data structure"""
203
+
204
+ def setup_method(self):
205
+ self.Graph = nx.DiGraph
206
+ # build dict-of-dict-of-dict K3
207
+ ed1, ed2, ed3, ed4, ed5, ed6 = ({}, {}, {}, {}, {}, {})
208
+ self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}}
209
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
210
+ self.k3nodes = [0, 1, 2]
211
+ self.K3 = self.Graph()
212
+ self.K3._succ = self.k3adj # K3._adj is synced with K3._succ
213
+ self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}}
214
+ self.K3._node = {}
215
+ self.K3._node[0] = {}
216
+ self.K3._node[1] = {}
217
+ self.K3._node[2] = {}
218
+
219
+ ed1, ed2 = ({}, {})
220
+ self.P3 = self.Graph()
221
+ self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}}
222
+ self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}}
223
+ # P3._adj is synced with P3._succ
224
+ self.P3._node = {}
225
+ self.P3._node[0] = {}
226
+ self.P3._node[1] = {}
227
+ self.P3._node[2] = {}
228
+
229
+ def test_data_input(self):
230
+ G = self.Graph({1: [2], 2: [1]}, name="test")
231
+ assert G.name == "test"
232
+ assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})]
233
+ assert sorted(G.succ.items()) == [(1, {2: {}}), (2, {1: {}})]
234
+ assert sorted(G.pred.items()) == [(1, {2: {}}), (2, {1: {}})]
235
+
236
+ def test_add_edge(self):
237
+ G = self.Graph()
238
+ G.add_edge(0, 1)
239
+ assert G.adj == {0: {1: {}}, 1: {}}
240
+ assert G.succ == {0: {1: {}}, 1: {}}
241
+ assert G.pred == {0: {}, 1: {0: {}}}
242
+ G = self.Graph()
243
+ G.add_edge(*(0, 1))
244
+ assert G.adj == {0: {1: {}}, 1: {}}
245
+ assert G.succ == {0: {1: {}}, 1: {}}
246
+ assert G.pred == {0: {}, 1: {0: {}}}
247
+ with pytest.raises(ValueError, match="None cannot be a node"):
248
+ G.add_edge(None, 3)
249
+
250
+ def test_add_edges_from(self):
251
+ G = self.Graph()
252
+ G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2)
253
+ assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}}
254
+ assert G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}}
255
+ assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}}
256
+
257
+ with pytest.raises(nx.NetworkXError):
258
+ G.add_edges_from([(0,)]) # too few in tuple
259
+ with pytest.raises(nx.NetworkXError):
260
+ G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple
261
+ with pytest.raises(TypeError):
262
+ G.add_edges_from([0]) # not a tuple
263
+ with pytest.raises(ValueError, match="None cannot be a node"):
264
+ G.add_edges_from([(None, 3), (3, 2)])
265
+
266
+ def test_remove_edge(self):
267
+ G = self.K3.copy()
268
+ G.remove_edge(0, 1)
269
+ assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}
270
+ assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
271
+ with pytest.raises(nx.NetworkXError):
272
+ G.remove_edge(-1, 0)
273
+
274
+ def test_remove_edges_from(self):
275
+ G = self.K3.copy()
276
+ G.remove_edges_from([(0, 1)])
277
+ assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}
278
+ assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
279
+ G.remove_edges_from([(0, 0)]) # silent fail
280
+
281
+ def test_clear(self):
282
+ G = self.K3
283
+ G.graph["name"] = "K3"
284
+ G.clear()
285
+ assert list(G.nodes) == []
286
+ assert G.succ == {}
287
+ assert G.pred == {}
288
+ assert G.graph == {}
289
+
290
+ def test_clear_edges(self):
291
+ G = self.K3
292
+ G.graph["name"] = "K3"
293
+ nodes = list(G.nodes)
294
+ G.clear_edges()
295
+ assert list(G.nodes) == nodes
296
+ expected = {0: {}, 1: {}, 2: {}}
297
+ assert G.succ == expected
298
+ assert G.pred == expected
299
+ assert list(G.edges) == []
300
+ assert G.graph["name"] == "K3"
301
+
302
+
303
+ class TestEdgeSubgraph(_TestGraphEdgeSubgraph):
304
+ """Unit tests for the :meth:`DiGraph.edge_subgraph` method."""
305
+
306
+ def setup_method(self):
307
+ # Create a doubly-linked path graph on five nodes.
308
+ G = nx.DiGraph(nx.path_graph(5))
309
+ # Add some node, edge, and graph attributes.
310
+ for i in range(5):
311
+ G.nodes[i]["name"] = f"node{i}"
312
+ G.edges[0, 1]["name"] = "edge01"
313
+ G.edges[3, 4]["name"] = "edge34"
314
+ G.graph["name"] = "graph"
315
+ # Get the subgraph induced by the first and last edges.
316
+ self.G = G
317
+ self.H = G.edge_subgraph([(0, 1), (3, 4)])
318
+
319
+ def test_pred_succ(self):
320
+ """Test that nodes are added to predecessors and successors.
321
+
322
+ For more information, see GitHub issue #2370.
323
+
324
+ """
325
+ G = nx.DiGraph()
326
+ G.add_edge(0, 1)
327
+ H = G.edge_subgraph([(0, 1)])
328
+ assert list(H.predecessors(0)) == []
329
+ assert list(H.successors(0)) == [1]
330
+ assert list(H.predecessors(1)) == [0]
331
+ assert list(H.successors(1)) == []
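The setup_method above wires K3 and P3 together by assigning the _succ and _pred dicts directly, and test_add_edge then checks that add_edge maintains the same dict-of-dict-of-dict shape. A compact sketch of that invariant, restating the assertions above:

import networkx as nx

G = nx.DiGraph()
G.add_edge(0, 1)

# forward adjacency lives in succ, reverse adjacency in pred,
# and adj mirrors succ for a DiGraph
assert G.succ == {0: {1: {}}, 1: {}}
assert G.pred == {0: {}, 1: {0: {}}}
assert G.adj == {0: {1: {}}, 1: {}}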
venv/lib/python3.10/site-packages/networkx/classes/tests/test_digraph_historical.py ADDED
@@ -0,0 +1,110 @@
1
+ """Original NetworkX graph tests"""
2
+ import pytest
3
+
4
+ import networkx
5
+ import networkx as nx
6
+
7
+ from .historical_tests import HistoricalTests
8
+
9
+
10
+ class TestDiGraphHistorical(HistoricalTests):
11
+ @classmethod
12
+ def setup_class(cls):
13
+ HistoricalTests.setup_class()
14
+ cls.G = nx.DiGraph
15
+
16
+ def test_in_degree(self):
17
+ G = self.G()
18
+ G.add_nodes_from("GJK")
19
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
20
+
21
+ assert sorted(d for n, d in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2]
22
+ assert dict(G.in_degree()) == {
23
+ "A": 0,
24
+ "C": 2,
25
+ "B": 1,
26
+ "D": 2,
27
+ "G": 0,
28
+ "K": 0,
29
+ "J": 0,
30
+ }
31
+
32
+ def test_out_degree(self):
33
+ G = self.G()
34
+ G.add_nodes_from("GJK")
35
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
36
+ assert sorted(v for k, v in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2]
37
+ assert dict(G.out_degree()) == {
38
+ "A": 2,
39
+ "C": 1,
40
+ "B": 2,
41
+ "D": 0,
42
+ "G": 0,
43
+ "K": 0,
44
+ "J": 0,
45
+ }
46
+
47
+ def test_degree_digraph(self):
48
+ H = nx.DiGraph()
49
+ H.add_edges_from([(1, 24), (1, 2)])
50
+ assert sorted(d for n, d in H.in_degree([1, 24])) == [0, 1]
51
+ assert sorted(d for n, d in H.out_degree([1, 24])) == [0, 2]
52
+ assert sorted(d for n, d in H.degree([1, 24])) == [1, 2]
53
+
54
+ def test_neighbors(self):
55
+ G = self.G()
56
+ G.add_nodes_from("GJK")
57
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
58
+
59
+ assert sorted(G.neighbors("C")) == ["D"]
60
+ assert sorted(G["C"]) == ["D"]
61
+ assert sorted(G.neighbors("A")) == ["B", "C"]
62
+ pytest.raises(nx.NetworkXError, G.neighbors, "j")
63
+ pytest.raises(nx.NetworkXError, G.neighbors, "j")
64
+
65
+ def test_successors(self):
66
+ G = self.G()
67
+ G.add_nodes_from("GJK")
68
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
69
+ assert sorted(G.successors("A")) == ["B", "C"]
70
+ assert sorted(G.successors("A")) == ["B", "C"]
71
+ assert sorted(G.successors("G")) == []
72
+ assert sorted(G.successors("D")) == []
73
+ assert sorted(G.successors("G")) == []
74
+ pytest.raises(nx.NetworkXError, G.successors, "j")
75
+ pytest.raises(nx.NetworkXError, G.successors, "j")
76
+
77
+ def test_predecessors(self):
78
+ G = self.G()
79
+ G.add_nodes_from("GJK")
80
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")])
81
+ assert sorted(G.predecessors("C")) == ["A", "B"]
82
+ assert sorted(G.predecessors("C")) == ["A", "B"]
83
+ assert sorted(G.predecessors("G")) == []
84
+ assert sorted(G.predecessors("A")) == []
85
+ assert sorted(G.predecessors("G")) == []
86
+ assert sorted(G.predecessors("A")) == []
87
+ assert sorted(G.successors("D")) == []
88
+
89
+ pytest.raises(nx.NetworkXError, G.predecessors, "j")
90
+ pytest.raises(nx.NetworkXError, G.predecessors, "j")
91
+
92
+ def test_reverse(self):
93
+ G = nx.complete_graph(10)
94
+ H = G.to_directed()
95
+ HR = H.reverse()
96
+ assert nx.is_isomorphic(H, HR)
97
+ assert sorted(H.edges()) == sorted(HR.edges())
98
+
99
+ def test_reverse2(self):
100
+ H = nx.DiGraph()
101
+ foo = [H.add_edge(u, u + 1) for u in range(5)]
102
+ HR = H.reverse()
103
+ for u in range(5):
104
+ assert HR.has_edge(u + 1, u)
105
+
106
+ def test_reverse3(self):
107
+ H = nx.DiGraph()
108
+ H.add_nodes_from([1, 2, 3, 4])
109
+ HR = H.reverse()
110
+ assert sorted(HR.nodes()) == [1, 2, 3, 4]
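test_reverse and its variants above rely on the fact that reversing a DiGraph flips every edge and therefore swaps the roles of in- and out-degree. A short illustrative sketch:

import networkx as nx

G = nx.DiGraph([("A", "B"), ("A", "C"), ("B", "C")])
R = G.reverse()

# every edge (u, v) becomes (v, u), so degrees swap direction
assert set(R.edges()) == {(v, u) for u, v in G.edges()}
assert dict(G.out_degree()) == dict(R.in_degree())
assert dict(G.in_degree()) == dict(R.out_degree())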
venv/lib/python3.10/site-packages/networkx/classes/tests/test_graph.py ADDED
@@ -0,0 +1,920 @@
1
+ import gc
2
+ import pickle
3
+ import platform
4
+ import weakref
5
+
6
+ import pytest
7
+
8
+ import networkx as nx
9
+ from networkx.utils import edges_equal, graphs_equal, nodes_equal
10
+
11
+
12
+ class BaseGraphTester:
13
+ """Tests for data-structure independent graph class features."""
14
+
15
+ def test_contains(self):
16
+ G = self.K3
17
+ assert 1 in G
18
+ assert 4 not in G
19
+ assert "b" not in G
20
+ assert [] not in G # no exception for nonhashable
21
+ assert {1: 1} not in G # no exception for nonhashable
22
+
23
+ def test_order(self):
24
+ G = self.K3
25
+ assert len(G) == 3
26
+ assert G.order() == 3
27
+ assert G.number_of_nodes() == 3
28
+
29
+ def test_nodes(self):
30
+ G = self.K3
31
+ assert isinstance(G._node, G.node_dict_factory)
32
+ assert isinstance(G._adj, G.adjlist_outer_dict_factory)
33
+ assert all(
34
+ isinstance(adj, G.adjlist_inner_dict_factory) for adj in G._adj.values()
35
+ )
36
+ assert sorted(G.nodes()) == self.k3nodes
37
+ assert sorted(G.nodes(data=True)) == [(0, {}), (1, {}), (2, {})]
38
+
39
+ def test_none_node(self):
40
+ G = self.Graph()
41
+ with pytest.raises(ValueError):
42
+ G.add_node(None)
43
+ with pytest.raises(ValueError):
44
+ G.add_nodes_from([None])
45
+ with pytest.raises(ValueError):
46
+ G.add_edge(0, None)
47
+ with pytest.raises(ValueError):
48
+ G.add_edges_from([(0, None)])
49
+
50
+ def test_has_node(self):
51
+ G = self.K3
52
+ assert G.has_node(1)
53
+ assert not G.has_node(4)
54
+ assert not G.has_node([]) # no exception for nonhashable
55
+ assert not G.has_node({1: 1}) # no exception for nonhashable
56
+
57
+ def test_has_edge(self):
58
+ G = self.K3
59
+ assert G.has_edge(0, 1)
60
+ assert not G.has_edge(0, -1)
61
+
62
+ def test_neighbors(self):
63
+ G = self.K3
64
+ assert sorted(G.neighbors(0)) == [1, 2]
65
+ with pytest.raises(nx.NetworkXError):
66
+ G.neighbors(-1)
67
+
68
+ @pytest.mark.skipif(
69
+ platform.python_implementation() == "PyPy", reason="PyPy gc is different"
70
+ )
71
+ def test_memory_leak(self):
72
+ G = self.Graph()
73
+
74
+ def count_objects_of_type(_type):
75
+ # Iterating over all objects tracked by gc can include weak references
76
+ # whose weakly-referenced objects may no longer exist. Calling `isinstance`
77
+ # on such a weak reference will raise ReferenceError. There are at least
78
+ # three workarounds for this: one is to compare type names instead of using
79
+ # `isinstance` such as `type(obj).__name__ == typename`, another is to use
80
+ # `type(obj) == _type`, and the last is to ignore ProxyTypes as we do below.
81
+ # NOTE: even if this safeguard is deemed unnecessary to pass NetworkX tests,
82
+ # we should still keep it for maximum safety for other NetworkX backends.
83
+ return sum(
84
+ 1
85
+ for obj in gc.get_objects()
86
+ if not isinstance(obj, weakref.ProxyTypes) and isinstance(obj, _type)
87
+ )
88
+
89
+ gc.collect()
90
+ before = count_objects_of_type(self.Graph)
91
+ G.copy()
92
+ gc.collect()
93
+ after = count_objects_of_type(self.Graph)
94
+ assert before == after
95
+
96
+ # test a subgraph of the base class
97
+ class MyGraph(self.Graph):
98
+ pass
99
+
100
+ gc.collect()
101
+ G = MyGraph()
102
+ before = count_objects_of_type(MyGraph)
103
+ G.copy()
104
+ gc.collect()
105
+ after = count_objects_of_type(MyGraph)
106
+ assert before == after
107
+
108
+ def test_edges(self):
109
+ G = self.K3
110
+ assert isinstance(G._adj, G.adjlist_outer_dict_factory)
111
+ assert edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)])
112
+ assert edges_equal(G.edges(0), [(0, 1), (0, 2)])
113
+ assert edges_equal(G.edges([0, 1]), [(0, 1), (0, 2), (1, 2)])
114
+ with pytest.raises(nx.NetworkXError):
115
+ G.edges(-1)
116
+
117
+ def test_degree(self):
118
+ G = self.K3
119
+ assert sorted(G.degree()) == [(0, 2), (1, 2), (2, 2)]
120
+ assert dict(G.degree()) == {0: 2, 1: 2, 2: 2}
121
+ assert G.degree(0) == 2
122
+ with pytest.raises(nx.NetworkXError):
123
+ G.degree(-1) # node not in graph
124
+
125
+ def test_size(self):
126
+ G = self.K3
127
+ assert G.size() == 3
128
+ assert G.number_of_edges() == 3
129
+
130
+ def test_nbunch_iter(self):
131
+ G = self.K3
132
+ assert nodes_equal(G.nbunch_iter(), self.k3nodes) # all nodes
133
+ assert nodes_equal(G.nbunch_iter(0), [0]) # single node
134
+ assert nodes_equal(G.nbunch_iter([0, 1]), [0, 1]) # sequence
135
+ # sequence with none in graph
136
+ assert nodes_equal(G.nbunch_iter([-1]), [])
137
+ # string sequence with none in graph
138
+ assert nodes_equal(G.nbunch_iter("foo"), [])
139
+ # node not in graph doesn't get caught upon creation of iterator
140
+ bunch = G.nbunch_iter(-1)
141
+ # but gets caught when iterator used
142
+ with pytest.raises(nx.NetworkXError, match="is not a node or a sequence"):
143
+ list(bunch)
144
+ # unhashable doesn't get caught upon creation of iterator
145
+ bunch = G.nbunch_iter([0, 1, 2, {}])
146
+ # but gets caught when iterator hits the unhashable
147
+ with pytest.raises(
148
+ nx.NetworkXError, match="in sequence nbunch is not a valid node"
149
+ ):
150
+ list(bunch)
151
+
152
+ def test_nbunch_iter_node_format_raise(self):
153
+ # Tests that a node that would have failed string formatting
154
+ # doesn't cause an error when attempting to raise a
155
+ # :exc:`nx.NetworkXError`.
156
+
157
+ # For more information, see pull request #1813.
158
+ G = self.Graph()
159
+ nbunch = [("x", set())]
160
+ with pytest.raises(nx.NetworkXError):
161
+ list(G.nbunch_iter(nbunch))
162
+
163
+ def test_selfloop_degree(self):
164
+ G = self.Graph()
165
+ G.add_edge(1, 1)
166
+ assert sorted(G.degree()) == [(1, 2)]
167
+ assert dict(G.degree()) == {1: 2}
168
+ assert G.degree(1) == 2
169
+ assert sorted(G.degree([1])) == [(1, 2)]
170
+ assert G.degree(1, weight="weight") == 2
171
+
172
+ def test_selfloops(self):
173
+ G = self.K3.copy()
174
+ G.add_edge(0, 0)
175
+ assert nodes_equal(nx.nodes_with_selfloops(G), [0])
176
+ assert edges_equal(nx.selfloop_edges(G), [(0, 0)])
177
+ assert nx.number_of_selfloops(G) == 1
178
+ G.remove_edge(0, 0)
179
+ G.add_edge(0, 0)
180
+ G.remove_edges_from([(0, 0)])
181
+ G.add_edge(1, 1)
182
+ G.remove_node(1)
183
+ G.add_edge(0, 0)
184
+ G.add_edge(1, 1)
185
+ G.remove_nodes_from([0, 1])
186
+
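+ # The adj and nodes properties below are cached views; the asserts check that
+ # the cache is rebuilt when the backing _adj/_node dicts are replaced.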
187
+ def test_cache_reset(self):
188
+ G = self.K3.copy()
189
+ old_adj = G.adj
190
+ assert id(G.adj) == id(old_adj)
191
+ G._adj = {}
192
+ assert id(G.adj) != id(old_adj)
193
+
194
+ old_nodes = G.nodes
195
+ assert id(G.nodes) == id(old_nodes)
196
+ G._node = {}
197
+ assert id(G.nodes) != id(old_nodes)
198
+
199
+ def test_attributes_cached(self):
200
+ G = self.K3.copy()
201
+ assert id(G.nodes) == id(G.nodes)
202
+ assert id(G.edges) == id(G.edges)
203
+ assert id(G.degree) == id(G.degree)
204
+ assert id(G.adj) == id(G.adj)
205
+
206
+
207
+ class BaseAttrGraphTester(BaseGraphTester):
208
+ """Tests of graph class attribute features."""
209
+
210
+ def test_weighted_degree(self):
211
+ G = self.Graph()
212
+ G.add_edge(1, 2, weight=2, other=3)
213
+ G.add_edge(2, 3, weight=3, other=4)
214
+ assert sorted(d for n, d in G.degree(weight="weight")) == [2, 3, 5]
215
+ assert dict(G.degree(weight="weight")) == {1: 2, 2: 5, 3: 3}
216
+ assert G.degree(1, weight="weight") == 2
217
+ assert nodes_equal((G.degree([1], weight="weight")), [(1, 2)])
218
+
219
+ assert nodes_equal((d for n, d in G.degree(weight="other")), [3, 7, 4])
220
+ assert dict(G.degree(weight="other")) == {1: 3, 2: 7, 3: 4}
221
+ assert G.degree(1, weight="other") == 3
222
+ assert edges_equal((G.degree([1], weight="other")), [(1, 3)])
223
+
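+ # Helper (not a test): attach mutable list attributes at the graph, node and
+ # edge level so that deep and shallow copies can be told apart later.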
224
+ def add_attributes(self, G):
225
+ G.graph["foo"] = []
226
+ G.nodes[0]["foo"] = []
227
+ G.remove_edge(1, 2)
228
+ ll = []
229
+ G.add_edge(1, 2, foo=ll)
230
+ G.add_edge(2, 1, foo=ll)
231
+
232
+ def test_name(self):
233
+ G = self.Graph(name="")
234
+ assert G.name == ""
235
+ G = self.Graph(name="test")
236
+ assert G.name == "test"
237
+
238
+ def test_str_unnamed(self):
239
+ G = self.Graph()
240
+ G.add_edges_from([(1, 2), (2, 3)])
241
+ assert str(G) == f"{type(G).__name__} with 3 nodes and 2 edges"
242
+
243
+ def test_str_named(self):
244
+ G = self.Graph(name="foo")
245
+ G.add_edges_from([(1, 2), (2, 3)])
246
+ assert str(G) == f"{type(G).__name__} named 'foo' with 3 nodes and 2 edges"
247
+
248
+ def test_graph_chain(self):
249
+ G = self.Graph([(0, 1), (1, 2)])
250
+ DG = G.to_directed(as_view=True)
251
+ SDG = DG.subgraph([0, 1])
252
+ RSDG = SDG.reverse(copy=False)
253
+ assert G is DG._graph
254
+ assert DG is SDG._graph
255
+ assert SDG is RSDG._graph
256
+
257
+ def test_copy(self):
258
+ G = self.Graph()
259
+ G.add_node(0)
260
+ G.add_edge(1, 2)
261
+ self.add_attributes(G)
262
+ # copy the edge data dicts, but any container attributes remain shared (same objects)
263
+ H = G.copy()
264
+ self.graphs_equal(H, G)
265
+ self.different_attrdict(H, G)
266
+ self.shallow_copy_attrdict(H, G)
267
+
268
+ def test_class_copy(self):
269
+ G = self.Graph()
270
+ G.add_node(0)
271
+ G.add_edge(1, 2)
272
+ self.add_attributes(G)
273
+ # copy the edge data dicts, but any container attributes remain shared (same objects)
274
+ H = G.__class__(G)
275
+ self.graphs_equal(H, G)
276
+ self.different_attrdict(H, G)
277
+ self.shallow_copy_attrdict(H, G)
278
+
279
+ def test_fresh_copy(self):
280
+ G = self.Graph()
281
+ G.add_node(0)
282
+ G.add_edge(1, 2)
283
+ self.add_attributes(G)
284
+ # copy graph structure but use fresh datadict
285
+ H = G.__class__()
286
+ H.add_nodes_from(G)
287
+ H.add_edges_from(G.edges())
288
+ assert len(G.nodes[0]) == 1
289
+ ddict = G.adj[1][2][0] if G.is_multigraph() else G.adj[1][2]
290
+ assert len(ddict) == 1
291
+ assert len(H.nodes[0]) == 0
292
+ ddict = H.adj[1][2][0] if H.is_multigraph() else H.adj[1][2]
293
+ assert len(ddict) == 0
294
+
295
+ def is_deepcopy(self, H, G):
296
+ self.graphs_equal(H, G)
297
+ self.different_attrdict(H, G)
298
+ self.deep_copy_attrdict(H, G)
299
+
300
+ def deep_copy_attrdict(self, H, G):
301
+ self.deepcopy_graph_attr(H, G)
302
+ self.deepcopy_node_attr(H, G)
303
+ self.deepcopy_edge_attr(H, G)
304
+
305
+ def deepcopy_graph_attr(self, H, G):
306
+ assert G.graph["foo"] == H.graph["foo"]
307
+ G.graph["foo"].append(1)
308
+ assert G.graph["foo"] != H.graph["foo"]
309
+
310
+ def deepcopy_node_attr(self, H, G):
311
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
312
+ G.nodes[0]["foo"].append(1)
313
+ assert G.nodes[0]["foo"] != H.nodes[0]["foo"]
314
+
315
+ def deepcopy_edge_attr(self, H, G):
316
+ assert G[1][2]["foo"] == H[1][2]["foo"]
317
+ G[1][2]["foo"].append(1)
318
+ assert G[1][2]["foo"] != H[1][2]["foo"]
319
+
320
+ def is_shallow_copy(self, H, G):
321
+ self.graphs_equal(H, G)
322
+ self.shallow_copy_attrdict(H, G)
323
+
324
+ def shallow_copy_attrdict(self, H, G):
325
+ self.shallow_copy_graph_attr(H, G)
326
+ self.shallow_copy_node_attr(H, G)
327
+ self.shallow_copy_edge_attr(H, G)
328
+
329
+ def shallow_copy_graph_attr(self, H, G):
330
+ assert G.graph["foo"] == H.graph["foo"]
331
+ G.graph["foo"].append(1)
332
+ assert G.graph["foo"] == H.graph["foo"]
333
+
334
+ def shallow_copy_node_attr(self, H, G):
335
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
336
+ G.nodes[0]["foo"].append(1)
337
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
338
+
339
+ def shallow_copy_edge_attr(self, H, G):
340
+ assert G[1][2]["foo"] == H[1][2]["foo"]
341
+ G[1][2]["foo"].append(1)
342
+ assert G[1][2]["foo"] == H[1][2]["foo"]
343
+
344
+ def same_attrdict(self, H, G):
345
+ old_foo = H[1][2]["foo"]
346
+ H.adj[1][2]["foo"] = "baz"
347
+ assert G.edges == H.edges
348
+ H.adj[1][2]["foo"] = old_foo
349
+ assert G.edges == H.edges
350
+
351
+ old_foo = H.nodes[0]["foo"]
352
+ H.nodes[0]["foo"] = "baz"
353
+ assert G.nodes == H.nodes
354
+ H.nodes[0]["foo"] = old_foo
355
+ assert G.nodes == H.nodes
356
+
357
+ def different_attrdict(self, H, G):
358
+ old_foo = H[1][2]["foo"]
359
+ H.adj[1][2]["foo"] = "baz"
360
+ assert G._adj != H._adj
361
+ H.adj[1][2]["foo"] = old_foo
362
+ assert G._adj == H._adj
363
+
364
+ old_foo = H.nodes[0]["foo"]
365
+ H.nodes[0]["foo"] = "baz"
366
+ assert G._node != H._node
367
+ H.nodes[0]["foo"] = old_foo
368
+ assert G._node == H._node
369
+
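+ # Helper (not a test): asserts structural equality and, for undirected graphs,
+ # that the same edge-data dict object is shared by both directions of an edge.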
370
+ def graphs_equal(self, H, G):
371
+ assert G._adj == H._adj
372
+ assert G._node == H._node
373
+ assert G.graph == H.graph
374
+ assert G.name == H.name
375
+ if not G.is_directed() and not H.is_directed():
376
+ assert H._adj[1][2] is H._adj[2][1]
377
+ assert G._adj[1][2] is G._adj[2][1]
378
+ else: # at least one is directed
379
+ if not G.is_directed():
380
+ G._pred = G._adj
381
+ G._succ = G._adj
382
+ if not H.is_directed():
383
+ H._pred = H._adj
384
+ H._succ = H._adj
385
+ assert G._pred == H._pred
386
+ assert G._succ == H._succ
387
+ assert H._succ[1][2] is H._pred[2][1]
388
+ assert G._succ[1][2] is G._pred[2][1]
389
+
390
+ def test_graph_attr(self):
391
+ G = self.K3.copy()
392
+ G.graph["foo"] = "bar"
393
+ assert isinstance(G.graph, G.graph_attr_dict_factory)
394
+ assert G.graph["foo"] == "bar"
395
+ del G.graph["foo"]
396
+ assert G.graph == {}
397
+ H = self.Graph(foo="bar")
398
+ assert H.graph["foo"] == "bar"
399
+
400
+ def test_node_attr(self):
401
+ G = self.K3.copy()
402
+ G.add_node(1, foo="bar")
403
+ assert all(
404
+ isinstance(d, G.node_attr_dict_factory) for u, d in G.nodes(data=True)
405
+ )
406
+ assert nodes_equal(G.nodes(), [0, 1, 2])
407
+ assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "bar"}), (2, {})])
408
+ G.nodes[1]["foo"] = "baz"
409
+ assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "baz"}), (2, {})])
410
+ assert nodes_equal(G.nodes(data="foo"), [(0, None), (1, "baz"), (2, None)])
411
+ assert nodes_equal(
412
+ G.nodes(data="foo", default="bar"), [(0, "bar"), (1, "baz"), (2, "bar")]
413
+ )
414
+
415
+ def test_node_attr2(self):
416
+ G = self.K3.copy()
417
+ a = {"foo": "bar"}
418
+ G.add_node(3, **a)
419
+ assert nodes_equal(G.nodes(), [0, 1, 2, 3])
420
+ assert nodes_equal(
421
+ G.nodes(data=True), [(0, {}), (1, {}), (2, {}), (3, {"foo": "bar"})]
422
+ )
423
+
424
+ def test_edge_lookup(self):
425
+ G = self.Graph()
426
+ G.add_edge(1, 2, foo="bar")
427
+ assert edges_equal(G.edges[1, 2], {"foo": "bar"})
428
+
429
+ def test_edge_attr(self):
430
+ G = self.Graph()
431
+ G.add_edge(1, 2, foo="bar")
432
+ assert all(
433
+ isinstance(d, G.edge_attr_dict_factory) for u, v, d in G.edges(data=True)
434
+ )
435
+ assert edges_equal(G.edges(data=True), [(1, 2, {"foo": "bar"})])
436
+ assert edges_equal(G.edges(data="foo"), [(1, 2, "bar")])
437
+
438
+ def test_edge_attr2(self):
439
+ G = self.Graph()
440
+ G.add_edges_from([(1, 2), (3, 4)], foo="foo")
441
+ assert edges_equal(
442
+ G.edges(data=True), [(1, 2, {"foo": "foo"}), (3, 4, {"foo": "foo"})]
443
+ )
444
+ assert edges_equal(G.edges(data="foo"), [(1, 2, "foo"), (3, 4, "foo")])
445
+
446
+ def test_edge_attr3(self):
447
+ G = self.Graph()
448
+ G.add_edges_from([(1, 2, {"weight": 32}), (3, 4, {"weight": 64})], foo="foo")
449
+ assert edges_equal(
450
+ G.edges(data=True),
451
+ [
452
+ (1, 2, {"foo": "foo", "weight": 32}),
453
+ (3, 4, {"foo": "foo", "weight": 64}),
454
+ ],
455
+ )
456
+
457
+ G.remove_edges_from([(1, 2), (3, 4)])
458
+ G.add_edge(1, 2, data=7, spam="bar", bar="foo")
459
+ assert edges_equal(
460
+ G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
461
+ )
462
+
463
+ def test_edge_attr4(self):
464
+ G = self.Graph()
465
+ G.add_edge(1, 2, data=7, spam="bar", bar="foo")
466
+ assert edges_equal(
467
+ G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
468
+ )
469
+ G[1][2]["data"] = 10 # OK to set data like this
470
+ assert edges_equal(
471
+ G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})]
472
+ )
473
+
474
+ G.adj[1][2]["data"] = 20
475
+ assert edges_equal(
476
+ G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})]
477
+ )
478
+ G.edges[1, 2]["data"] = 21 # another spelling, "edge"
479
+ assert edges_equal(
480
+ G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})]
481
+ )
482
+ G.adj[1][2]["listdata"] = [20, 200]
483
+ G.adj[1][2]["weight"] = 20
484
+ dd = {
485
+ "data": 21,
486
+ "spam": "bar",
487
+ "bar": "foo",
488
+ "listdata": [20, 200],
489
+ "weight": 20,
490
+ }
491
+ assert edges_equal(G.edges(data=True), [(1, 2, dd)])
492
+
493
+ def test_to_undirected(self):
494
+ G = self.K3
495
+ self.add_attributes(G)
496
+ H = nx.Graph(G)
497
+ self.is_shallow_copy(H, G)
498
+ self.different_attrdict(H, G)
499
+ H = G.to_undirected()
500
+ self.is_deepcopy(H, G)
501
+
502
+ def test_to_directed_as_view(self):
503
+ H = nx.path_graph(2, create_using=self.Graph)
504
+ H2 = H.to_directed(as_view=True)
505
+ assert H is H2._graph
506
+ assert H2.has_edge(0, 1)
507
+ assert H2.has_edge(1, 0) or H.is_directed()
508
+ pytest.raises(nx.NetworkXError, H2.add_node, -1)
509
+ pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2)
510
+ H.add_edge(1, 2)
511
+ assert H2.has_edge(1, 2)
512
+ assert H2.has_edge(2, 1) or H.is_directed()
513
+
514
+ def test_to_undirected_as_view(self):
515
+ H = nx.path_graph(2, create_using=self.Graph)
516
+ H2 = H.to_undirected(as_view=True)
517
+ assert H is H2._graph
518
+ assert H2.has_edge(0, 1)
519
+ assert H2.has_edge(1, 0)
520
+ pytest.raises(nx.NetworkXError, H2.add_node, -1)
521
+ pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2)
522
+ H.add_edge(1, 2)
523
+ assert H2.has_edge(1, 2)
524
+ assert H2.has_edge(2, 1)
525
+
526
+ def test_directed_class(self):
527
+ G = self.Graph()
528
+
529
+ class newGraph(G.to_undirected_class()):
530
+ def to_directed_class(self):
531
+ return newDiGraph
532
+
533
+ def to_undirected_class(self):
534
+ return newGraph
535
+
536
+ class newDiGraph(G.to_directed_class()):
537
+ def to_directed_class(self):
538
+ return newDiGraph
539
+
540
+ def to_undirected_class(self):
541
+ return newGraph
542
+
543
+ G = newDiGraph() if G.is_directed() else newGraph()
544
+ H = G.to_directed()
545
+ assert isinstance(H, newDiGraph)
546
+ H = G.to_undirected()
547
+ assert isinstance(H, newGraph)
548
+
549
+ def test_to_directed(self):
550
+ G = self.K3
551
+ self.add_attributes(G)
552
+ H = nx.DiGraph(G)
553
+ self.is_shallow_copy(H, G)
554
+ self.different_attrdict(H, G)
555
+ H = G.to_directed()
556
+ self.is_deepcopy(H, G)
557
+
558
+ def test_subgraph(self):
559
+ G = self.K3
560
+ self.add_attributes(G)
561
+ H = G.subgraph([0, 1, 2, 5])
562
+ self.graphs_equal(H, G)
563
+ self.same_attrdict(H, G)
564
+ self.shallow_copy_attrdict(H, G)
565
+
566
+ H = G.subgraph(0)
567
+ assert H.adj == {0: {}}
568
+ H = G.subgraph([])
569
+ assert H.adj == {}
570
+ assert G.adj != {}
571
+
572
+ def test_selfloops_attr(self):
573
+ G = self.K3.copy()
574
+ G.add_edge(0, 0)
575
+ G.add_edge(1, 1, weight=2)
576
+ assert edges_equal(
577
+ nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})]
578
+ )
579
+ assert edges_equal(
580
+ nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]
581
+ )
582
+
583
+
584
+ class TestGraph(BaseAttrGraphTester):
585
+ """Tests specific to dict-of-dict-of-dict graph data structure"""
586
+
587
+ def setup_method(self):
588
+ self.Graph = nx.Graph
589
+ # build dict-of-dict-of-dict K3
590
+ ed1, ed2, ed3 = ({}, {}, {})
591
+ self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
592
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
593
+ self.k3nodes = [0, 1, 2]
594
+ self.K3 = self.Graph()
595
+ self.K3._adj = self.k3adj
596
+ self.K3._node = {}
597
+ self.K3._node[0] = {}
598
+ self.K3._node[1] = {}
599
+ self.K3._node[2] = {}
600
+
601
+ def test_pickle(self):
602
+ G = self.K3
603
+ pg = pickle.loads(pickle.dumps(G, -1))
604
+ self.graphs_equal(pg, G)
605
+ pg = pickle.loads(pickle.dumps(G))
606
+ self.graphs_equal(pg, G)
607
+
608
+ def test_data_input(self):
609
+ G = self.Graph({1: [2], 2: [1]}, name="test")
610
+ assert G.name == "test"
611
+ assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})]
612
+
613
+ def test_adjacency(self):
614
+ G = self.K3
615
+ assert dict(G.adjacency()) == {
616
+ 0: {1: {}, 2: {}},
617
+ 1: {0: {}, 2: {}},
618
+ 2: {0: {}, 1: {}},
619
+ }
620
+
621
+ def test_getitem(self):
622
+ G = self.K3
623
+ assert G.adj[0] == {1: {}, 2: {}}
624
+ assert G[0] == {1: {}, 2: {}}
625
+ with pytest.raises(KeyError):
626
+ G.__getitem__("j")
627
+ with pytest.raises(TypeError):
628
+ G.__getitem__(["A"])
629
+
630
+ def test_add_node(self):
631
+ G = self.Graph()
632
+ G.add_node(0)
633
+ assert G.adj == {0: {}}
634
+ # test add attributes
635
+ G.add_node(1, c="red")
636
+ G.add_node(2, c="blue")
637
+ G.add_node(3, c="red")
638
+ assert G.nodes[1]["c"] == "red"
639
+ assert G.nodes[2]["c"] == "blue"
640
+ assert G.nodes[3]["c"] == "red"
641
+ # test updating attributes
642
+ G.add_node(1, c="blue")
643
+ G.add_node(2, c="red")
644
+ G.add_node(3, c="blue")
645
+ assert G.nodes[1]["c"] == "blue"
646
+ assert G.nodes[2]["c"] == "red"
647
+ assert G.nodes[3]["c"] == "blue"
648
+
649
+ def test_add_nodes_from(self):
650
+ G = self.Graph()
651
+ G.add_nodes_from([0, 1, 2])
652
+ assert G.adj == {0: {}, 1: {}, 2: {}}
653
+ # test add attributes
654
+ G.add_nodes_from([0, 1, 2], c="red")
655
+ assert G.nodes[0]["c"] == "red"
656
+ assert G.nodes[2]["c"] == "red"
657
+ # test that attribute dicts are not the same
658
+ assert G.nodes[0] is not G.nodes[1]
659
+ # test updating attributes
660
+ G.add_nodes_from([0, 1, 2], c="blue")
661
+ assert G.nodes[0]["c"] == "blue"
662
+ assert G.nodes[2]["c"] == "blue"
663
+ assert G.nodes[0] is not G.nodes[1]
664
+ # test tuple input
665
+ H = self.Graph()
666
+ H.add_nodes_from(G.nodes(data=True))
667
+ assert H.nodes[0]["c"] == "blue"
668
+ assert H.nodes[2]["c"] == "blue"
669
+ assert H.nodes[0] is not H.nodes[1]
670
+ # specific overrides general
671
+ H.add_nodes_from([0, (1, {"c": "green"}), (3, {"c": "cyan"})], c="red")
672
+ assert H.nodes[0]["c"] == "red"
673
+ assert H.nodes[1]["c"] == "green"
674
+ assert H.nodes[2]["c"] == "blue"
675
+ assert H.nodes[3]["c"] == "cyan"
676
+
677
+ def test_remove_node(self):
678
+ G = self.K3.copy()
679
+ G.remove_node(0)
680
+ assert G.adj == {1: {2: {}}, 2: {1: {}}}
681
+ with pytest.raises(nx.NetworkXError):
682
+ G.remove_node(-1)
683
+
684
+ # remove_nodes_from accepts any iterable of nodes (list, set, string, generator, ...)
685
+
686
+ def test_remove_nodes_from(self):
687
+ G = self.K3.copy()
688
+ G.remove_nodes_from([0, 1])
689
+ assert G.adj == {2: {}}
690
+ G.remove_nodes_from([-1]) # silent fail
691
+
692
+ def test_add_edge(self):
693
+ G = self.Graph()
694
+ G.add_edge(0, 1)
695
+ assert G.adj == {0: {1: {}}, 1: {0: {}}}
696
+ G = self.Graph()
697
+ G.add_edge(*(0, 1))
698
+ assert G.adj == {0: {1: {}}, 1: {0: {}}}
699
+ G = self.Graph()
700
+ with pytest.raises(ValueError):
701
+ G.add_edge(None, "anything")
702
+
703
+ def test_add_edges_from(self):
704
+ G = self.Graph()
705
+ G.add_edges_from([(0, 1), (0, 2, {"weight": 3})])
706
+ assert G.adj == {
707
+ 0: {1: {}, 2: {"weight": 3}},
708
+ 1: {0: {}},
709
+ 2: {0: {"weight": 3}},
710
+ }
711
+ G = self.Graph()
712
+ G.add_edges_from([(0, 1), (0, 2, {"weight": 3}), (1, 2, {"data": 4})], data=2)
713
+ assert G.adj == {
714
+ 0: {1: {"data": 2}, 2: {"weight": 3, "data": 2}},
715
+ 1: {0: {"data": 2}, 2: {"data": 4}},
716
+ 2: {0: {"weight": 3, "data": 2}, 1: {"data": 4}},
717
+ }
718
+
719
+ with pytest.raises(nx.NetworkXError):
720
+ G.add_edges_from([(0,)]) # too few in tuple
721
+ with pytest.raises(nx.NetworkXError):
722
+ G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple
723
+ with pytest.raises(TypeError):
724
+ G.add_edges_from([0]) # not a tuple
725
+ with pytest.raises(ValueError):
726
+ G.add_edges_from([(None, 3), (3, 2)]) # None cannot be a node
727
+
728
+ def test_remove_edge(self):
729
+ G = self.K3.copy()
730
+ G.remove_edge(0, 1)
731
+ assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
732
+ with pytest.raises(nx.NetworkXError):
733
+ G.remove_edge(-1, 0)
734
+
735
+ def test_remove_edges_from(self):
736
+ G = self.K3.copy()
737
+ G.remove_edges_from([(0, 1)])
738
+ assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}}
739
+ G.remove_edges_from([(0, 0)]) # silent fail
740
+
741
+ def test_clear(self):
742
+ G = self.K3.copy()
743
+ G.graph["name"] = "K3"
744
+ G.clear()
745
+ assert list(G.nodes) == []
746
+ assert G.adj == {}
747
+ assert G.graph == {}
748
+
749
+ def test_clear_edges(self):
750
+ G = self.K3.copy()
751
+ G.graph["name"] = "K3"
752
+ nodes = list(G.nodes)
753
+ G.clear_edges()
754
+ assert list(G.nodes) == nodes
755
+ assert G.adj == {0: {}, 1: {}, 2: {}}
756
+ assert list(G.edges) == []
757
+ assert G.graph["name"] == "K3"
758
+
759
+ def test_edges_data(self):
760
+ G = self.K3
761
+ all_edges = [(0, 1, {}), (0, 2, {}), (1, 2, {})]
762
+ assert edges_equal(G.edges(data=True), all_edges)
763
+ assert edges_equal(G.edges(0, data=True), [(0, 1, {}), (0, 2, {})])
764
+ assert edges_equal(G.edges([0, 1], data=True), all_edges)
765
+ with pytest.raises(nx.NetworkXError):
766
+ G.edges(-1, True)
767
+
768
+ def test_get_edge_data(self):
769
+ G = self.K3.copy()
770
+ assert G.get_edge_data(0, 1) == {}
771
+ assert G[0][1] == {}
772
+ assert G.get_edge_data(10, 20) is None
773
+ assert G.get_edge_data(-1, 0) is None
774
+ assert G.get_edge_data(-1, 0, default=1) == 1
775
+
776
+ def test_update(self):
777
+ # specify both edges and nodes
778
+ G = self.K3.copy()
779
+ G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})])
780
+ nlist = [
781
+ (0, {}),
782
+ (1, {}),
783
+ (2, {}),
784
+ (3, {}),
785
+ (4, {"size": 2}),
786
+ (5, {}),
787
+ (6, {}),
788
+ (7, {}),
789
+ ]
790
+ assert sorted(G.nodes.data()) == nlist
791
+ if G.is_directed():
792
+ elist = [
793
+ (0, 1, {}),
794
+ (0, 2, {}),
795
+ (1, 0, {}),
796
+ (1, 2, {}),
797
+ (2, 0, {}),
798
+ (2, 1, {}),
799
+ (4, 5, {}),
800
+ (6, 7, {"weight": 2}),
801
+ ]
802
+ else:
803
+ elist = [
804
+ (0, 1, {}),
805
+ (0, 2, {}),
806
+ (1, 2, {}),
807
+ (4, 5, {}),
808
+ (6, 7, {"weight": 2}),
809
+ ]
810
+ assert sorted(G.edges.data()) == elist
811
+ assert G.graph == {}
812
+
813
+ # no keywords -- order is edges, nodes
814
+ G = self.K3.copy()
815
+ G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})])
816
+ assert sorted(G.nodes.data()) == nlist
817
+ assert sorted(G.edges.data()) == elist
818
+ assert G.graph == {}
819
+
820
+ # update using only a graph
821
+ G = self.Graph()
822
+ G.graph["foo"] = "bar"
823
+ G.add_node(2, data=4)
824
+ G.add_edge(0, 1, weight=0.5)
825
+ GG = G.copy()
826
+ H = self.Graph()
827
+ GG.update(H)
828
+ assert graphs_equal(G, GG)
829
+ H.update(G)
830
+ assert graphs_equal(H, G)
831
+
832
+ # update nodes only
833
+ H = self.Graph()
834
+ H.update(nodes=[3, 4])
835
+ assert H.nodes ^ {3, 4} == set()
836
+ assert H.size() == 0
837
+
838
+ # update edges only
839
+ H = self.Graph()
840
+ H.update(edges=[(3, 4)])
841
+ assert sorted(H.edges.data()) == [(3, 4, {})]
842
+ assert H.size() == 1
843
+
844
+ # No inputs -> exception
845
+ with pytest.raises(nx.NetworkXError):
846
+ nx.Graph().update()
847
+
848
+
849
+ class TestEdgeSubgraph:
850
+ """Unit tests for the :meth:`Graph.edge_subgraph` method."""
851
+
852
+ def setup_method(self):
853
+ # Create a path graph on five nodes.
854
+ G = nx.path_graph(5)
855
+ # Add some node, edge, and graph attributes.
856
+ for i in range(5):
857
+ G.nodes[i]["name"] = f"node{i}"
858
+ G.edges[0, 1]["name"] = "edge01"
859
+ G.edges[3, 4]["name"] = "edge34"
860
+ G.graph["name"] = "graph"
861
+ # Get the subgraph induced by the first and last edges.
862
+ self.G = G
863
+ self.H = G.edge_subgraph([(0, 1), (3, 4)])
864
+
865
+ def test_correct_nodes(self):
866
+ """Tests that the subgraph has the correct nodes."""
867
+ assert [0, 1, 3, 4] == sorted(self.H.nodes())
868
+
869
+ def test_correct_edges(self):
870
+ """Tests that the subgraph has the correct edges."""
871
+ assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name"))
872
+
873
+ def test_add_node(self):
874
+ """Tests that adding a node to the original graph does not
875
+ affect the nodes of the subgraph.
876
+
877
+ """
878
+ self.G.add_node(5)
879
+ assert [0, 1, 3, 4] == sorted(self.H.nodes())
880
+
881
+ def test_remove_node(self):
882
+ """Tests that removing a node in the original graph does
883
+ affect the nodes of the subgraph.
884
+
885
+ """
886
+ self.G.remove_node(0)
887
+ assert [1, 3, 4] == sorted(self.H.nodes())
888
+
889
+ def test_node_attr_dict(self):
890
+ """Tests that the node attribute dictionary of the two graphs is
891
+ the same object.
892
+
893
+ """
894
+ for v in self.H:
895
+ assert self.G.nodes[v] == self.H.nodes[v]
896
+ # Making a change to G should make a change in H and vice versa.
897
+ self.G.nodes[0]["name"] = "foo"
898
+ assert self.G.nodes[0] == self.H.nodes[0]
899
+ self.H.nodes[1]["name"] = "bar"
900
+ assert self.G.nodes[1] == self.H.nodes[1]
901
+
902
+ def test_edge_attr_dict(self):
903
+ """Tests that the edge attribute dictionary of the two graphs is
904
+ the same object.
905
+
906
+ """
907
+ for u, v in self.H.edges():
908
+ assert self.G.edges[u, v] == self.H.edges[u, v]
909
+ # Making a change to G should make a change in H and vice versa.
910
+ self.G.edges[0, 1]["name"] = "foo"
911
+ assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"]
912
+ self.H.edges[3, 4]["name"] = "bar"
913
+ assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"]
914
+
915
+ def test_graph_attr_dict(self):
916
+ """Tests that the graph attribute dictionary of the two graphs
917
+ is the same object.
918
+
919
+ """
920
+ assert self.G.graph is self.H.graph
venv/lib/python3.10/site-packages/networkx/classes/tests/test_graph_historical.py ADDED
@@ -0,0 +1,12 @@
1
+ """Original NetworkX graph tests"""
2
+ import networkx
3
+ import networkx as nx
4
+
5
+ from .historical_tests import HistoricalTests
6
+
7
+
8
+ class TestGraphHistorical(HistoricalTests):
9
+ @classmethod
10
+ def setup_class(cls):
11
+ HistoricalTests.setup_class()
12
+ cls.G = nx.Graph
venv/lib/python3.10/site-packages/networkx/classes/tests/test_graphviews.py ADDED
@@ -0,0 +1,350 @@
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.utils import edges_equal, nodes_equal
5
+
6
+ # Note: SubGraph views are not tested here. They have their own testing file
7
+
8
+
9
+ class TestReverseView:
10
+ def setup_method(self):
11
+ self.G = nx.path_graph(9, create_using=nx.DiGraph())
12
+ self.rv = nx.reverse_view(self.G)
13
+
14
+ def test_pickle(self):
15
+ import pickle
16
+
17
+ rv = self.rv
18
+ prv = pickle.loads(pickle.dumps(rv, -1))
19
+ assert rv._node == prv._node
20
+ assert rv._adj == prv._adj
21
+ assert rv.graph == prv.graph
22
+
23
+ def test_contains(self):
24
+ assert (2, 3) in self.G.edges
25
+ assert (3, 2) not in self.G.edges
26
+ assert (2, 3) not in self.rv.edges
27
+ assert (3, 2) in self.rv.edges
28
+
29
+ def test_iter(self):
30
+ expected = sorted(tuple(reversed(e)) for e in self.G.edges)
31
+ assert sorted(self.rv.edges) == expected
32
+
33
+ def test_exceptions(self):
34
+ G = nx.Graph()
35
+ pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, G)
36
+
37
+ def test_subclass(self):
38
+ class MyGraph(nx.DiGraph):
39
+ def my_method(self):
40
+ return "me"
41
+
42
+ def to_directed_class(self):
43
+ return MyGraph()
44
+
45
+ M = MyGraph()
46
+ M.add_edge(1, 2)
47
+ RM = nx.reverse_view(M)
48
+ print("RM class", RM.__class__)
49
+ RMC = RM.copy()
50
+ print("RMC class", RMC.__class__)
51
+ print(RMC.edges)
52
+ assert RMC.has_edge(2, 1)
53
+ assert RMC.my_method() == "me"
54
+
55
+
56
+ class TestMultiReverseView:
57
+ def setup_method(self):
58
+ self.G = nx.path_graph(9, create_using=nx.MultiDiGraph())
59
+ self.G.add_edge(4, 5)
60
+ self.rv = nx.reverse_view(self.G)
61
+
62
+ def test_pickle(self):
63
+ import pickle
64
+
65
+ rv = self.rv
66
+ prv = pickle.loads(pickle.dumps(rv, -1))
67
+ assert rv._node == prv._node
68
+ assert rv._adj == prv._adj
69
+ assert rv.graph == prv.graph
70
+
71
+ def test_contains(self):
72
+ assert (2, 3, 0) in self.G.edges
73
+ assert (3, 2, 0) not in self.G.edges
74
+ assert (2, 3, 0) not in self.rv.edges
75
+ assert (3, 2, 0) in self.rv.edges
76
+ assert (5, 4, 1) in self.rv.edges
77
+ assert (4, 5, 1) not in self.rv.edges
78
+
79
+ def test_iter(self):
80
+ expected = sorted((v, u, k) for u, v, k in self.G.edges)
81
+ assert sorted(self.rv.edges) == expected
82
+
83
+ def test_exceptions(self):
84
+ MG = nx.MultiGraph(self.G)
85
+ pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, MG)
86
+
87
+
88
+ def test_generic_multitype():
89
+ nxg = nx.graphviews
90
+ G = nx.DiGraph([(1, 2)])
91
+ with pytest.raises(nx.NetworkXError):
92
+ nxg.generic_graph_view(G, create_using=nx.MultiGraph)
93
+ G = nx.MultiDiGraph([(1, 2)])
94
+ with pytest.raises(nx.NetworkXError):
95
+ nxg.generic_graph_view(G, create_using=nx.DiGraph)
96
+
97
+
98
+ class TestToDirected:
99
+ def setup_method(self):
100
+ self.G = nx.path_graph(9)
101
+ self.dv = nx.to_directed(self.G)
102
+ self.MG = nx.path_graph(9, create_using=nx.MultiGraph())
103
+ self.Mdv = nx.to_directed(self.MG)
104
+
105
+ def test_directed(self):
106
+ assert not self.G.is_directed()
107
+ assert self.dv.is_directed()
108
+
109
+ def test_already_directed(self):
110
+ dd = nx.to_directed(self.dv)
111
+ Mdd = nx.to_directed(self.Mdv)
112
+ assert edges_equal(dd.edges, self.dv.edges)
113
+ assert edges_equal(Mdd.edges, self.Mdv.edges)
114
+
115
+ def test_pickle(self):
116
+ import pickle
117
+
118
+ dv = self.dv
119
+ pdv = pickle.loads(pickle.dumps(dv, -1))
120
+ assert dv._node == pdv._node
121
+ assert dv._succ == pdv._succ
122
+ assert dv._pred == pdv._pred
123
+ assert dv.graph == pdv.graph
124
+
125
+ def test_contains(self):
126
+ assert (2, 3) in self.G.edges
127
+ assert (3, 2) in self.G.edges
128
+ assert (2, 3) in self.dv.edges
129
+ assert (3, 2) in self.dv.edges
130
+
131
+ def test_iter(self):
132
+ revd = [tuple(reversed(e)) for e in self.G.edges]
133
+ expected = sorted(list(self.G.edges) + revd)
134
+ assert sorted(self.dv.edges) == expected
135
+
136
+
137
+ class TestToUndirected:
138
+ def setup_method(self):
139
+ self.DG = nx.path_graph(9, create_using=nx.DiGraph())
140
+ self.uv = nx.to_undirected(self.DG)
141
+ self.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph())
142
+ self.Muv = nx.to_undirected(self.MDG)
143
+
144
+ def test_directed(self):
145
+ assert self.DG.is_directed()
146
+ assert not self.uv.is_directed()
147
+
148
+ def test_already_directed(self):
149
+ uu = nx.to_undirected(self.uv)
150
+ Muu = nx.to_undirected(self.Muv)
151
+ assert edges_equal(uu.edges, self.uv.edges)
152
+ assert edges_equal(Muu.edges, self.Muv.edges)
153
+
154
+ def test_pickle(self):
155
+ import pickle
156
+
157
+ uv = self.uv
158
+ puv = pickle.loads(pickle.dumps(uv, -1))
159
+ assert uv._node == puv._node
160
+ assert uv._adj == puv._adj
161
+ assert uv.graph == puv.graph
162
+ assert hasattr(uv, "_graph")
163
+
164
+ def test_contains(self):
165
+ assert (2, 3) in self.DG.edges
166
+ assert (3, 2) not in self.DG.edges
167
+ assert (2, 3) in self.uv.edges
168
+ assert (3, 2) in self.uv.edges
169
+
170
+ def test_iter(self):
171
+ expected = sorted(self.DG.edges)
172
+ assert sorted(self.uv.edges) == expected
173
+
174
+
175
+ class TestChainsOfViews:
176
+ @classmethod
177
+ def setup_class(cls):
178
+ cls.G = nx.path_graph(9)
179
+ cls.DG = nx.path_graph(9, create_using=nx.DiGraph())
180
+ cls.MG = nx.path_graph(9, create_using=nx.MultiGraph())
181
+ cls.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph())
182
+ cls.Gv = nx.to_undirected(cls.DG)
183
+ cls.DGv = nx.to_directed(cls.G)
184
+ cls.MGv = nx.to_undirected(cls.MDG)
185
+ cls.MDGv = nx.to_directed(cls.MG)
186
+ cls.Rv = cls.DG.reverse()
187
+ cls.MRv = cls.MDG.reverse()
188
+ cls.graphs = [
189
+ cls.G,
190
+ cls.DG,
191
+ cls.MG,
192
+ cls.MDG,
193
+ cls.Gv,
194
+ cls.DGv,
195
+ cls.MGv,
196
+ cls.MDGv,
197
+ cls.Rv,
198
+ cls.MRv,
199
+ ]
200
+ for G in cls.graphs:
201
+ G.edges, G.nodes, G.degree
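+ # touching these properties materializes the cached views before the tests run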
202
+
203
+ def test_pickle(self):
204
+ import pickle
205
+
206
+ for G in self.graphs:
207
+ H = pickle.loads(pickle.dumps(G, -1))
208
+ assert edges_equal(H.edges, G.edges)
209
+ assert nodes_equal(H.nodes, G.nodes)
210
+
211
+ def test_subgraph_of_subgraph(self):
212
+ SGv = nx.subgraph(self.G, range(3, 7))
213
+ SDGv = nx.subgraph(self.DG, range(3, 7))
214
+ SMGv = nx.subgraph(self.MG, range(3, 7))
215
+ SMDGv = nx.subgraph(self.MDG, range(3, 7))
216
+ for G in self.graphs + [SGv, SDGv, SMGv, SMDGv]:
217
+ SG = nx.induced_subgraph(G, [4, 5, 6])
218
+ assert list(SG) == [4, 5, 6]
219
+ SSG = SG.subgraph([6, 7])
220
+ assert list(SSG) == [6]
221
+ # subgraph-subgraph chain is short-cut in base class method
222
+ assert SSG._graph is G
223
+
224
+ def test_restricted_induced_subgraph_chains(self):
225
+ """Test subgraph chains that both restrict and show nodes/edges.
226
+
227
+ A restricted_view subgraph should allow induced subgraphs created with
228
+ G.subgraph to automatically short-cut the chain (meaning the result is
229
+ a subgraph view of the original graph, not a subgraph-of-subgraph).
230
+ """
231
+ hide_nodes = [3, 4, 5]
232
+ hide_edges = [(6, 7)]
233
+ RG = nx.restricted_view(self.G, hide_nodes, hide_edges)
234
+ nodes = [4, 5, 6, 7, 8]
235
+ SG = nx.induced_subgraph(RG, nodes)
236
+ SSG = RG.subgraph(nodes)
237
+ assert RG._graph is self.G
238
+ assert SSG._graph is self.G
239
+ assert SG._graph is RG
240
+ assert edges_equal(SG.edges, SSG.edges)
241
+ # should be same as morphing the graph
242
+ CG = self.G.copy()
243
+ CG.remove_nodes_from(hide_nodes)
244
+ CG.remove_edges_from(hide_edges)
245
+ assert edges_equal(CG.edges(nodes), SSG.edges)
246
+ CG.remove_nodes_from([0, 1, 2, 3])
247
+ assert edges_equal(CG.edges, SSG.edges)
248
+ # switch order: subgraph first, then restricted view
249
+ SSSG = self.G.subgraph(nodes)
250
+ RSG = nx.restricted_view(SSSG, hide_nodes, hide_edges)
251
+ assert RSG._graph is not self.G
252
+ assert edges_equal(RSG.edges, CG.edges)
253
+
254
+ def test_subgraph_copy(self):
255
+ for origG in self.graphs:
256
+ G = nx.Graph(origG)
257
+ SG = G.subgraph([4, 5, 6])
258
+ H = SG.copy()
259
+ assert type(G) == type(H)
260
+
261
+ def test_subgraph_todirected(self):
262
+ SG = nx.induced_subgraph(self.G, [4, 5, 6])
263
+ SSG = SG.to_directed()
264
+ assert sorted(SSG) == [4, 5, 6]
265
+ assert sorted(SSG.edges) == [(4, 5), (5, 4), (5, 6), (6, 5)]
266
+
267
+ def test_subgraph_toundirected(self):
268
+ SG = nx.induced_subgraph(self.G, [4, 5, 6])
269
+ SSG = SG.to_undirected()
270
+ assert list(SSG) == [4, 5, 6]
271
+ assert sorted(SSG.edges) == [(4, 5), (5, 6)]
272
+
273
+ def test_reverse_subgraph_toundirected(self):
274
+ G = self.DG.reverse(copy=False)
275
+ SG = G.subgraph([4, 5, 6])
276
+ SSG = SG.to_undirected()
277
+ assert list(SSG) == [4, 5, 6]
278
+ assert sorted(SSG.edges) == [(4, 5), (5, 6)]
279
+
280
+ def test_reverse_reverse_copy(self):
281
+ G = self.DG.reverse(copy=False)
282
+ H = G.reverse(copy=True)
283
+ assert H.nodes == self.DG.nodes
284
+ assert H.edges == self.DG.edges
285
+ G = self.MDG.reverse(copy=False)
286
+ H = G.reverse(copy=True)
287
+ assert H.nodes == self.MDG.nodes
288
+ assert H.edges == self.MDG.edges
289
+
290
+ def test_subgraph_edgesubgraph_toundirected(self):
291
+ G = self.G.copy()
292
+ SG = G.subgraph([4, 5, 6])
293
+ SSG = SG.edge_subgraph([(4, 5), (5, 4)])
294
+ USSG = SSG.to_undirected()
295
+ assert list(USSG) == [4, 5]
296
+ assert sorted(USSG.edges) == [(4, 5)]
297
+
298
+ def test_copy_subgraph(self):
299
+ G = self.G.copy()
300
+ SG = G.subgraph([4, 5, 6])
301
+ CSG = SG.copy(as_view=True)
302
+ DCSG = SG.copy(as_view=False)
303
+ assert hasattr(CSG, "_graph") # is a view
304
+ assert not hasattr(DCSG, "_graph") # not a view
305
+
306
+ def test_copy_disubgraph(self):
307
+ G = self.DG.copy()
308
+ SG = G.subgraph([4, 5, 6])
309
+ CSG = SG.copy(as_view=True)
310
+ DCSG = SG.copy(as_view=False)
311
+ assert hasattr(CSG, "_graph") # is a view
312
+ assert not hasattr(DCSG, "_graph") # not a view
313
+
314
+ def test_copy_multidisubgraph(self):
315
+ G = self.MDG.copy()
316
+ SG = G.subgraph([4, 5, 6])
317
+ CSG = SG.copy(as_view=True)
318
+ DCSG = SG.copy(as_view=False)
319
+ assert hasattr(CSG, "_graph") # is a view
320
+ assert not hasattr(DCSG, "_graph") # not a view
321
+
322
+ def test_copy_multisubgraph(self):
323
+ G = self.MG.copy()
324
+ SG = G.subgraph([4, 5, 6])
325
+ CSG = SG.copy(as_view=True)
326
+ DCSG = SG.copy(as_view=False)
327
+ assert hasattr(CSG, "_graph") # is a view
328
+ assert not hasattr(DCSG, "_graph") # not a view
329
+
330
+ def test_copy_of_view(self):
331
+ G = nx.MultiGraph(self.MGv)
332
+ assert G.__class__.__name__ == "MultiGraph"
333
+ G = G.copy(as_view=True)
334
+ assert G.__class__.__name__ == "MultiGraph"
335
+
336
+ def test_subclass(self):
337
+ class MyGraph(nx.DiGraph):
338
+ def my_method(self):
339
+ return "me"
340
+
341
+ def to_directed_class(self):
342
+ return MyGraph()
343
+
344
+ for origG in self.graphs:
345
+ G = MyGraph(origG)
346
+ SG = G.subgraph([4, 5, 6])
347
+ H = SG.copy()
348
+ assert SG.my_method() == "me"
349
+ assert H.my_method() == "me"
350
+ assert 3 not in H or 3 in SG
venv/lib/python3.10/site-packages/networkx/classes/tests/test_multidigraph.py ADDED
@@ -0,0 +1,459 @@
1
+ from collections import UserDict
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+ from networkx.utils import edges_equal
7
+
8
+ from .test_multigraph import BaseMultiGraphTester
9
+ from .test_multigraph import TestEdgeSubgraph as _TestMultiGraphEdgeSubgraph
10
+ from .test_multigraph import TestMultiGraph as _TestMultiGraph
11
+
12
+
13
+ class BaseMultiDiGraphTester(BaseMultiGraphTester):
14
+ def test_edges(self):
15
+ G = self.K3
16
+ edges = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
17
+ assert sorted(G.edges()) == edges
18
+ assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
19
+ pytest.raises((KeyError, nx.NetworkXError), G.edges, -1)
20
+
21
+ def test_edges_data(self):
22
+ G = self.K3
23
+ edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {})]
24
+ assert sorted(G.edges(data=True)) == edges
25
+ assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
26
+ pytest.raises((KeyError, nx.NetworkXError), G.neighbors, -1)
27
+
28
+ def test_edges_multi(self):
29
+ G = self.K3
30
+ assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
31
+ assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
32
+ G.add_edge(0, 1)
33
+ assert sorted(G.edges()) == [
34
+ (0, 1),
35
+ (0, 1),
36
+ (0, 2),
37
+ (1, 0),
38
+ (1, 2),
39
+ (2, 0),
40
+ (2, 1),
41
+ ]
42
+
43
+ def test_out_edges(self):
44
+ G = self.K3
45
+ assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
46
+ assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
47
+ pytest.raises((KeyError, nx.NetworkXError), G.out_edges, -1)
48
+ assert sorted(G.out_edges(0, keys=True)) == [(0, 1, 0), (0, 2, 0)]
49
+
50
+ def test_out_edges_multi(self):
51
+ G = self.K3
52
+ assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
53
+ assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
54
+ G.add_edge(0, 1, 2)
55
+ assert sorted(G.out_edges()) == [
56
+ (0, 1),
57
+ (0, 1),
58
+ (0, 2),
59
+ (1, 0),
60
+ (1, 2),
61
+ (2, 0),
62
+ (2, 1),
63
+ ]
64
+
65
+ def test_out_edges_data(self):
66
+ G = self.K3
67
+ assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
68
+ G.remove_edge(0, 1)
69
+ G.add_edge(0, 1, data=1)
70
+ assert sorted(G.edges(0, data=True)) == [(0, 1, {"data": 1}), (0, 2, {})]
71
+ assert sorted(G.edges(0, data="data")) == [(0, 1, 1), (0, 2, None)]
72
+ assert sorted(G.edges(0, data="data", default=-1)) == [(0, 1, 1), (0, 2, -1)]
73
+
74
+ def test_in_edges(self):
75
+ G = self.K3
76
+ assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
77
+ assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
78
+ pytest.raises((KeyError, nx.NetworkXError), G.in_edges, -1)
79
+ G.add_edge(0, 1, 2)
80
+ assert sorted(G.in_edges()) == [
81
+ (0, 1),
82
+ (0, 1),
83
+ (0, 2),
84
+ (1, 0),
85
+ (1, 2),
86
+ (2, 0),
87
+ (2, 1),
88
+ ]
89
+ assert sorted(G.in_edges(0, keys=True)) == [(1, 0, 0), (2, 0, 0)]
90
+
91
+ def test_in_edges_no_keys(self):
92
+ G = self.K3
93
+ assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
94
+ assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
95
+ G.add_edge(0, 1, 2)
96
+ assert sorted(G.in_edges()) == [
97
+ (0, 1),
98
+ (0, 1),
99
+ (0, 2),
100
+ (1, 0),
101
+ (1, 2),
102
+ (2, 0),
103
+ (2, 1),
104
+ ]
105
+
106
+ assert sorted(G.in_edges(data=True, keys=False)) == [
107
+ (0, 1, {}),
108
+ (0, 1, {}),
109
+ (0, 2, {}),
110
+ (1, 0, {}),
111
+ (1, 2, {}),
112
+ (2, 0, {}),
113
+ (2, 1, {}),
114
+ ]
115
+
116
+ def test_in_edges_data(self):
117
+ G = self.K3
118
+ assert sorted(G.in_edges(0, data=True)) == [(1, 0, {}), (2, 0, {})]
119
+ G.remove_edge(1, 0)
120
+ G.add_edge(1, 0, data=1)
121
+ assert sorted(G.in_edges(0, data=True)) == [(1, 0, {"data": 1}), (2, 0, {})]
122
+ assert sorted(G.in_edges(0, data="data")) == [(1, 0, 1), (2, 0, None)]
123
+ assert sorted(G.in_edges(0, data="data", default=-1)) == [(1, 0, 1), (2, 0, -1)]
124
+
125
+ def is_shallow(self, H, G):
126
+ # graph
127
+ assert G.graph["foo"] == H.graph["foo"]
128
+ G.graph["foo"].append(1)
129
+ assert G.graph["foo"] == H.graph["foo"]
130
+ # node
131
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
132
+ G.nodes[0]["foo"].append(1)
133
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
134
+ # edge
135
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
136
+ G[1][2][0]["foo"].append(1)
137
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
138
+
139
+ def is_deep(self, H, G):
140
+ # graph
141
+ assert G.graph["foo"] == H.graph["foo"]
142
+ G.graph["foo"].append(1)
143
+ assert G.graph["foo"] != H.graph["foo"]
144
+ # node
145
+ assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
146
+ G.nodes[0]["foo"].append(1)
147
+ assert G.nodes[0]["foo"] != H.nodes[0]["foo"]
148
+ # edge
149
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
150
+ G[1][2][0]["foo"].append(1)
151
+ assert G[1][2][0]["foo"] != H[1][2][0]["foo"]
152
+
153
+ def test_to_undirected(self):
154
+ # MultiDiGraph -> MultiGraph changes number of edges so it is
155
+ # not a copy operation... use is_shallow, not is_shallow_copy
156
+ G = self.K3
157
+ self.add_attributes(G)
158
+ H = nx.MultiGraph(G)
159
+ # self.is_shallow(H,G)
160
+ # the result is traversal order dependent so we
161
+ # can't use the is_shallow() test here.
162
+ try:
163
+ assert edges_equal(H.edges(), [(0, 1), (1, 2), (2, 0)])
164
+ except AssertionError:
165
+ assert edges_equal(H.edges(), [(0, 1), (1, 2), (1, 2), (2, 0)])
166
+ H = G.to_undirected()
167
+ self.is_deep(H, G)
168
+
169
+ def test_has_successor(self):
170
+ G = self.K3
171
+ assert G.has_successor(0, 1)
172
+ assert not G.has_successor(0, -1)
173
+
174
+ def test_successors(self):
175
+ G = self.K3
176
+ assert sorted(G.successors(0)) == [1, 2]
177
+ pytest.raises((KeyError, nx.NetworkXError), G.successors, -1)
178
+
179
+ def test_has_predecessor(self):
180
+ G = self.K3
181
+ assert G.has_predecessor(0, 1)
182
+ assert not G.has_predecessor(0, -1)
183
+
184
+ def test_predecessors(self):
185
+ G = self.K3
186
+ assert sorted(G.predecessors(0)) == [1, 2]
187
+ pytest.raises((KeyError, nx.NetworkXError), G.predecessors, -1)
188
+
189
+ def test_degree(self):
190
+ G = self.K3
191
+ assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)]
192
+ assert dict(G.degree()) == {0: 4, 1: 4, 2: 4}
193
+ assert G.degree(0) == 4
194
+ assert list(G.degree(iter([0]))) == [(0, 4)]
195
+ G.add_edge(0, 1, weight=0.3, other=1.2)
196
+ assert sorted(G.degree(weight="weight")) == [(0, 4.3), (1, 4.3), (2, 4)]
197
+ assert sorted(G.degree(weight="other")) == [(0, 5.2), (1, 5.2), (2, 4)]
198
+
199
+ def test_in_degree(self):
200
+ G = self.K3
201
+ assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)]
202
+ assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2}
203
+ assert G.in_degree(0) == 2
204
+ assert list(G.in_degree(iter([0]))) == [(0, 2)]
205
+ assert G.in_degree(0, weight="weight") == 2
206
+
207
+ def test_out_degree(self):
208
+ G = self.K3
209
+ assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)]
210
+ assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2}
211
+ assert G.out_degree(0) == 2
212
+ assert list(G.out_degree(iter([0]))) == [(0, 2)]
213
+ assert G.out_degree(0, weight="weight") == 2
214
+
215
+ def test_size(self):
216
+ G = self.K3
217
+ assert G.size() == 6
218
+ assert G.number_of_edges() == 6
219
+ G.add_edge(0, 1, weight=0.3, other=1.2)
220
+ assert round(G.size(weight="weight"), 2) == 6.3
221
+ assert round(G.size(weight="other"), 2) == 7.2
222
+
223
+ def test_to_undirected_reciprocal(self):
224
+ G = self.Graph()
225
+ G.add_edge(1, 2)
226
+ assert G.to_undirected().has_edge(1, 2)
227
+ assert not G.to_undirected(reciprocal=True).has_edge(1, 2)
228
+ G.add_edge(2, 1)
229
+ assert G.to_undirected(reciprocal=True).has_edge(1, 2)
230
+
231
+ def test_reverse_copy(self):
232
+ G = nx.MultiDiGraph([(0, 1), (0, 1)])
233
+ R = G.reverse()
234
+ assert sorted(R.edges()) == [(1, 0), (1, 0)]
235
+ R.remove_edge(1, 0)
236
+ assert sorted(R.edges()) == [(1, 0)]
237
+ assert sorted(G.edges()) == [(0, 1), (0, 1)]
238
+
239
+ def test_reverse_nocopy(self):
240
+ G = nx.MultiDiGraph([(0, 1), (0, 1)])
241
+ R = G.reverse(copy=False)
242
+ assert sorted(R.edges()) == [(1, 0), (1, 0)]
243
+ pytest.raises(nx.NetworkXError, R.remove_edge, 1, 0)
244
+
245
+ def test_di_attributes_cached(self):
246
+ G = self.K3.copy()
247
+ assert id(G.in_edges) == id(G.in_edges)
248
+ assert id(G.out_edges) == id(G.out_edges)
249
+ assert id(G.in_degree) == id(G.in_degree)
250
+ assert id(G.out_degree) == id(G.out_degree)
251
+ assert id(G.succ) == id(G.succ)
252
+ assert id(G.pred) == id(G.pred)
253
+
254
+
255
+ class TestMultiDiGraph(BaseMultiDiGraphTester, _TestMultiGraph):
256
+ def setup_method(self):
257
+ self.Graph = nx.MultiDiGraph
258
+ # build K3
259
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
260
+ self.k3nodes = [0, 1, 2]
261
+ self.K3 = self.Graph()
262
+ self.K3._succ = {0: {}, 1: {}, 2: {}}
263
+ # K3._adj is synced with K3._succ
264
+ self.K3._pred = {0: {}, 1: {}, 2: {}}
265
+ for u in self.k3nodes:
266
+ for v in self.k3nodes:
267
+ if u == v:
268
+ continue
269
+ d = {0: {}}
270
+ self.K3._succ[u][v] = d
271
+ self.K3._pred[v][u] = d
272
+ self.K3._node = {}
273
+ self.K3._node[0] = {}
274
+ self.K3._node[1] = {}
275
+ self.K3._node[2] = {}
276
+
277
+ def test_add_edge(self):
278
+ G = self.Graph()
279
+ G.add_edge(0, 1)
280
+ assert G._adj == {0: {1: {0: {}}}, 1: {}}
281
+ assert G._succ == {0: {1: {0: {}}}, 1: {}}
282
+ assert G._pred == {0: {}, 1: {0: {0: {}}}}
283
+ G = self.Graph()
284
+ G.add_edge(*(0, 1))
285
+ assert G._adj == {0: {1: {0: {}}}, 1: {}}
286
+ assert G._succ == {0: {1: {0: {}}}, 1: {}}
287
+ assert G._pred == {0: {}, 1: {0: {0: {}}}}
288
+ with pytest.raises(ValueError, match="None cannot be a node"):
289
+ G.add_edge(None, 3)
290
+
291
+ def test_add_edges_from(self):
292
+ G = self.Graph()
293
+ G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
294
+ assert G._adj == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
295
+ assert G._succ == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
296
+ assert G._pred == {0: {}, 1: {0: {0: {}, 1: {"weight": 3}}}}
297
+
298
+ G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
299
+ assert G._succ == {
300
+ 0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
301
+ 1: {},
302
+ }
303
+ assert G._pred == {
304
+ 0: {},
305
+ 1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
306
+ }
307
+
308
+ G = self.Graph()
309
+ edges = [
310
+ (0, 1, {"weight": 3}),
311
+ (0, 1, (("weight", 2),)),
312
+ (0, 1, 5),
313
+ (0, 1, "s"),
314
+ ]
315
+ G.add_edges_from(edges)
316
+ keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}}
317
+ assert G._succ == {0: {1: keydict}, 1: {}}
318
+ assert G._pred == {1: {0: keydict}, 0: {}}
319
+
320
+ # too few in tuple
321
+ pytest.raises(nx.NetworkXError, G.add_edges_from, [(0,)])
322
+ # too many in tuple
323
+ pytest.raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3, 4)])
324
+ # not a tuple
325
+ pytest.raises(TypeError, G.add_edges_from, [0])
326
+ with pytest.raises(ValueError, match="None cannot be a node"):
327
+ G.add_edges_from([(None, 3), (3, 2)])
328
+
329
+ def test_remove_edge(self):
330
+ G = self.K3
331
+ G.remove_edge(0, 1)
332
+ assert G._succ == {
333
+ 0: {2: {0: {}}},
334
+ 1: {0: {0: {}}, 2: {0: {}}},
335
+ 2: {0: {0: {}}, 1: {0: {}}},
336
+ }
337
+ assert G._pred == {
338
+ 0: {1: {0: {}}, 2: {0: {}}},
339
+ 1: {2: {0: {}}},
340
+ 2: {0: {0: {}}, 1: {0: {}}},
341
+ }
342
+ pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
343
+ pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2, key=1)
344
+
345
+ def test_remove_multiedge(self):
346
+ G = self.K3
347
+ G.add_edge(0, 1, key="parallel edge")
348
+ G.remove_edge(0, 1, key="parallel edge")
349
+ assert G._adj == {
350
+ 0: {1: {0: {}}, 2: {0: {}}},
351
+ 1: {0: {0: {}}, 2: {0: {}}},
352
+ 2: {0: {0: {}}, 1: {0: {}}},
353
+ }
354
+
355
+ assert G._succ == {
356
+ 0: {1: {0: {}}, 2: {0: {}}},
357
+ 1: {0: {0: {}}, 2: {0: {}}},
358
+ 2: {0: {0: {}}, 1: {0: {}}},
359
+ }
360
+
361
+ assert G._pred == {
362
+ 0: {1: {0: {}}, 2: {0: {}}},
363
+ 1: {0: {0: {}}, 2: {0: {}}},
364
+ 2: {0: {0: {}}, 1: {0: {}}},
365
+ }
366
+ G.remove_edge(0, 1)
367
+ assert G._succ == {
368
+ 0: {2: {0: {}}},
369
+ 1: {0: {0: {}}, 2: {0: {}}},
370
+ 2: {0: {0: {}}, 1: {0: {}}},
371
+ }
372
+ assert G._pred == {
373
+ 0: {1: {0: {}}, 2: {0: {}}},
374
+ 1: {2: {0: {}}},
375
+ 2: {0: {0: {}}, 1: {0: {}}},
376
+ }
377
+ pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
378
+
379
+ def test_remove_edges_from(self):
380
+ G = self.K3
381
+ G.remove_edges_from([(0, 1)])
382
+ assert G._succ == {
383
+ 0: {2: {0: {}}},
384
+ 1: {0: {0: {}}, 2: {0: {}}},
385
+ 2: {0: {0: {}}, 1: {0: {}}},
386
+ }
387
+ assert G._pred == {
388
+ 0: {1: {0: {}}, 2: {0: {}}},
389
+ 1: {2: {0: {}}},
390
+ 2: {0: {0: {}}, 1: {0: {}}},
391
+ }
392
+ G.remove_edges_from([(0, 0)]) # silent fail
393
+
394
+
395
+ class TestEdgeSubgraph(_TestMultiGraphEdgeSubgraph):
396
+ """Unit tests for the :meth:`MultiDiGraph.edge_subgraph` method."""
397
+
398
+ def setup_method(self):
399
+ # Create a quadruply-linked path graph on five nodes.
400
+ G = nx.MultiDiGraph()
401
+ nx.add_path(G, range(5))
402
+ nx.add_path(G, range(5))
403
+ nx.add_path(G, reversed(range(5)))
404
+ nx.add_path(G, reversed(range(5)))
405
+ # Add some node, edge, and graph attributes.
406
+ for i in range(5):
407
+ G.nodes[i]["name"] = f"node{i}"
408
+ G.adj[0][1][0]["name"] = "edge010"
409
+ G.adj[0][1][1]["name"] = "edge011"
410
+ G.adj[3][4][0]["name"] = "edge340"
411
+ G.adj[3][4][1]["name"] = "edge341"
412
+ G.graph["name"] = "graph"
413
+ # Get the subgraph induced by one of the first edges and one of
414
+ # the last edges.
415
+ self.G = G
416
+ self.H = G.edge_subgraph([(0, 1, 0), (3, 4, 1)])
417
+
418
+
419
+ class CustomDictClass(UserDict):
420
+ pass
421
+
422
+
423
+ class MultiDiGraphSubClass(nx.MultiDiGraph):
424
+ node_dict_factory = CustomDictClass # type: ignore[assignment]
425
+ node_attr_dict_factory = CustomDictClass # type: ignore[assignment]
426
+ adjlist_outer_dict_factory = CustomDictClass # type: ignore[assignment]
427
+ adjlist_inner_dict_factory = CustomDictClass # type: ignore[assignment]
428
+ edge_key_dict_factory = CustomDictClass # type: ignore[assignment]
429
+ edge_attr_dict_factory = CustomDictClass # type: ignore[assignment]
430
+ graph_attr_dict_factory = CustomDictClass # type: ignore[assignment]
431
+
432
+
433
+ class TestMultiDiGraphSubclass(TestMultiDiGraph):
434
+ def setup_method(self):
435
+ self.Graph = MultiDiGraphSubClass
436
+ # build K3
437
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
438
+ self.k3nodes = [0, 1, 2]
439
+ self.K3 = self.Graph()
440
+ self.K3._succ = self.K3.adjlist_outer_dict_factory(
441
+ {
442
+ 0: self.K3.adjlist_inner_dict_factory(),
443
+ 1: self.K3.adjlist_inner_dict_factory(),
444
+ 2: self.K3.adjlist_inner_dict_factory(),
445
+ }
446
+ )
447
+ # K3._adj is synced with K3._succ
448
+ self.K3._pred = {0: {}, 1: {}, 2: {}}
449
+ for u in self.k3nodes:
450
+ for v in self.k3nodes:
451
+ if u == v:
452
+ continue
453
+ d = {0: {}}
454
+ self.K3._succ[u][v] = d
455
+ self.K3._pred[v][u] = d
456
+ self.K3._node = self.K3.node_dict_factory()
457
+ self.K3._node[0] = self.K3.node_attr_dict_factory()
458
+ self.K3._node[1] = self.K3.node_attr_dict_factory()
459
+ self.K3._node[2] = self.K3.node_attr_dict_factory()
venv/lib/python3.10/site-packages/networkx/classes/tests/test_multigraph.py ADDED
@@ -0,0 +1,528 @@
1
+ from collections import UserDict
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+ from networkx.utils import edges_equal
7
+
8
+ from .test_graph import BaseAttrGraphTester
9
+ from .test_graph import TestGraph as _TestGraph
10
+
11
+
12
+ class BaseMultiGraphTester(BaseAttrGraphTester):
13
+ def test_has_edge(self):
14
+ G = self.K3
15
+ assert G.has_edge(0, 1)
16
+ assert not G.has_edge(0, -1)
17
+ assert G.has_edge(0, 1, 0)
18
+ assert not G.has_edge(0, 1, 1)
19
+
20
+ def test_get_edge_data(self):
21
+ G = self.K3
22
+ assert G.get_edge_data(0, 1) == {0: {}}
23
+ assert G[0][1] == {0: {}}
24
+ assert G[0][1][0] == {}
25
+ assert G.get_edge_data(10, 20) is None
26
+ assert G.get_edge_data(0, 1, 0) == {}
27
+
28
+ def test_adjacency(self):
29
+ G = self.K3
30
+ assert dict(G.adjacency()) == {
31
+ 0: {1: {0: {}}, 2: {0: {}}},
32
+ 1: {0: {0: {}}, 2: {0: {}}},
33
+ 2: {0: {0: {}}, 1: {0: {}}},
34
+ }
35
+
36
+ def deepcopy_edge_attr(self, H, G):
37
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
38
+ G[1][2][0]["foo"].append(1)
39
+ assert G[1][2][0]["foo"] != H[1][2][0]["foo"]
40
+
41
+ def shallow_copy_edge_attr(self, H, G):
42
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
43
+ G[1][2][0]["foo"].append(1)
44
+ assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
45
+
46
+ def graphs_equal(self, H, G):
47
+ assert G._adj == H._adj
48
+ assert G._node == H._node
49
+ assert G.graph == H.graph
50
+ assert G.name == H.name
51
+ if not G.is_directed() and not H.is_directed():
52
+ assert H._adj[1][2][0] is H._adj[2][1][0]
53
+ assert G._adj[1][2][0] is G._adj[2][1][0]
54
+ else: # at least one is directed
55
+ if not G.is_directed():
56
+ G._pred = G._adj
57
+ G._succ = G._adj
58
+ if not H.is_directed():
59
+ H._pred = H._adj
60
+ H._succ = H._adj
61
+ assert G._pred == H._pred
62
+ assert G._succ == H._succ
63
+ assert H._succ[1][2][0] is H._pred[2][1][0]
64
+ assert G._succ[1][2][0] is G._pred[2][1][0]
65
+
66
+ def same_attrdict(self, H, G):
67
+ # same attrdict in the edgedata
68
+ old_foo = H[1][2][0]["foo"]
69
+ H.adj[1][2][0]["foo"] = "baz"
70
+ assert G._adj == H._adj
71
+ H.adj[1][2][0]["foo"] = old_foo
72
+ assert G._adj == H._adj
73
+
74
+ old_foo = H.nodes[0]["foo"]
75
+ H.nodes[0]["foo"] = "baz"
76
+ assert G._node == H._node
77
+ H.nodes[0]["foo"] = old_foo
78
+ assert G._node == H._node
79
+
80
+ def different_attrdict(self, H, G):
81
+ # used by graph_equal_but_different
82
+ old_foo = H[1][2][0]["foo"]
83
+ H.adj[1][2][0]["foo"] = "baz"
84
+ assert G._adj != H._adj
85
+ H.adj[1][2][0]["foo"] = old_foo
86
+ assert G._adj == H._adj
87
+
88
+ old_foo = H.nodes[0]["foo"]
89
+ H.nodes[0]["foo"] = "baz"
90
+ assert G._node != H._node
91
+ H.nodes[0]["foo"] = old_foo
92
+ assert G._node == H._node
93
+
94
+ def test_to_undirected(self):
95
+ G = self.K3
96
+ self.add_attributes(G)
97
+ H = nx.MultiGraph(G)
98
+ self.is_shallow_copy(H, G)
99
+ H = G.to_undirected()
100
+ self.is_deepcopy(H, G)
101
+
102
+ def test_to_directed(self):
103
+ G = self.K3
104
+ self.add_attributes(G)
105
+ H = nx.MultiDiGraph(G)
106
+ self.is_shallow_copy(H, G)
107
+ H = G.to_directed()
108
+ self.is_deepcopy(H, G)
109
+
110
+ def test_number_of_edges_selfloops(self):
111
+ G = self.K3
112
+ G.add_edge(0, 0)
113
+ G.add_edge(0, 0)
114
+ G.add_edge(0, 0, key="parallel edge")
115
+ G.remove_edge(0, 0, key="parallel edge")
116
+ assert G.number_of_edges(0, 0) == 2
117
+ G.remove_edge(0, 0)
118
+ assert G.number_of_edges(0, 0) == 1
119
+
120
+ def test_edge_lookup(self):
121
+ G = self.Graph()
122
+ G.add_edge(1, 2, foo="bar")
123
+ G.add_edge(1, 2, "key", foo="biz")
124
+ assert edges_equal(G.edges[1, 2, 0], {"foo": "bar"})
125
+ assert edges_equal(G.edges[1, 2, "key"], {"foo": "biz"})
126
+
127
+ def test_edge_attr(self):
128
+ G = self.Graph()
129
+ G.add_edge(1, 2, key="k1", foo="bar")
130
+ G.add_edge(1, 2, key="k2", foo="baz")
131
+ assert isinstance(G.get_edge_data(1, 2), G.edge_key_dict_factory)
132
+ assert all(
133
+ isinstance(d, G.edge_attr_dict_factory) for u, v, d in G.edges(data=True)
134
+ )
135
+ assert edges_equal(
136
+ G.edges(keys=True, data=True),
137
+ [(1, 2, "k1", {"foo": "bar"}), (1, 2, "k2", {"foo": "baz"})],
138
+ )
139
+ assert edges_equal(
140
+ G.edges(keys=True, data="foo"), [(1, 2, "k1", "bar"), (1, 2, "k2", "baz")]
141
+ )
142
+
143
+ def test_edge_attr4(self):
144
+ G = self.Graph()
145
+ G.add_edge(1, 2, key=0, data=7, spam="bar", bar="foo")
146
+ assert edges_equal(
147
+ G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
148
+ )
149
+ G[1][2][0]["data"] = 10 # OK to set data like this
150
+ assert edges_equal(
151
+ G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})]
152
+ )
153
+
154
+ G.adj[1][2][0]["data"] = 20
155
+ assert edges_equal(
156
+ G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})]
157
+ )
158
+ G.edges[1, 2, 0]["data"] = 21 # another spelling, "edge"
159
+ assert edges_equal(
160
+ G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})]
161
+ )
162
+ G.adj[1][2][0]["listdata"] = [20, 200]
163
+ G.adj[1][2][0]["weight"] = 20
164
+ assert edges_equal(
165
+ G.edges(data=True),
166
+ [
167
+ (
168
+ 1,
169
+ 2,
170
+ {
171
+ "data": 21,
172
+ "spam": "bar",
173
+ "bar": "foo",
174
+ "listdata": [20, 200],
175
+ "weight": 20,
176
+ },
177
+ )
178
+ ],
179
+ )
180
+
181
+
182
+ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
183
+ def setup_method(self):
184
+ self.Graph = nx.MultiGraph
185
+ # build K3
186
+ ed1, ed2, ed3 = ({0: {}}, {0: {}}, {0: {}})
187
+ self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
188
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
189
+ self.k3nodes = [0, 1, 2]
190
+ self.K3 = self.Graph()
191
+ self.K3._adj = self.k3adj
192
+ self.K3._node = {}
193
+ self.K3._node[0] = {}
194
+ self.K3._node[1] = {}
195
+ self.K3._node[2] = {}
196
+
197
+ def test_data_input(self):
198
+ G = self.Graph({1: [2], 2: [1]}, name="test")
199
+ assert G.name == "test"
200
+ expected = [(1, {2: {0: {}}}), (2, {1: {0: {}}})]
201
+ assert sorted(G.adj.items()) == expected
202
+
203
+ def test_data_multigraph_input(self):
204
+ # standard case with edge keys and edge data
205
+ edata0 = {"w": 200, "s": "foo"}
206
+ edata1 = {"w": 201, "s": "bar"}
207
+ keydict = {0: edata0, 1: edata1}
208
+ dododod = {"a": {"b": keydict}}
209
+
210
+ multiple_edge = [("a", "b", 0, edata0), ("a", "b", 1, edata1)]
211
+ single_edge = [("a", "b", 0, keydict)]
212
+
213
+ G = self.Graph(dododod, multigraph_input=True)
214
+ assert list(G.edges(keys=True, data=True)) == multiple_edge
215
+ G = self.Graph(dododod, multigraph_input=None)
216
+ assert list(G.edges(keys=True, data=True)) == multiple_edge
217
+ G = self.Graph(dododod, multigraph_input=False)
218
+ assert list(G.edges(keys=True, data=True)) == single_edge
219
+
220
+ # test round-trip to_dict_of_dict and MultiGraph constructor
221
+ G = self.Graph(dododod, multigraph_input=True)
222
+ H = self.Graph(nx.to_dict_of_dicts(G))
223
+ assert nx.is_isomorphic(G, H) is True # test that default is True
224
+ for mgi in [True, False]:
225
+ H = self.Graph(nx.to_dict_of_dicts(G), multigraph_input=mgi)
226
+ assert nx.is_isomorphic(G, H) == mgi
227
+
228
+ # Set up cases for when incoming_graph_data is not multigraph_input
229
+ etraits = {"w": 200, "s": "foo"}
230
+ egraphics = {"color": "blue", "shape": "box"}
231
+ edata = {"traits": etraits, "graphics": egraphics}
232
+ dodod1 = {"a": {"b": edata}}
233
+ dodod2 = {"a": {"b": etraits}}
234
+ dodod3 = {"a": {"b": {"traits": etraits, "s": "foo"}}}
235
+ dol = {"a": ["b"]}
236
+
237
+ multiple_edge = [("a", "b", "traits", etraits), ("a", "b", "graphics", egraphics)]
238
+ single_edge = [("a", "b", 0, {})] # type: ignore[var-annotated]
239
+ single_edge1 = [("a", "b", 0, edata)]
240
+ single_edge2 = [("a", "b", 0, etraits)]
241
+ single_edge3 = [("a", "b", 0, {"traits": etraits, "s": "foo"})]
242
+
243
+ cases = [ # (dod, mgi, edges)
244
+ (dodod1, True, multiple_edge),
245
+ (dodod1, False, single_edge1),
246
+ (dodod2, False, single_edge2),
247
+ (dodod3, False, single_edge3),
248
+ (dol, False, single_edge),
249
+ ]
250
+
251
+ @pytest.mark.parametrize("dod, mgi, edges", cases)
252
+ def test_non_multigraph_input(self, dod, mgi, edges):
253
+ G = self.Graph(dod, multigraph_input=mgi)
254
+ assert list(G.edges(keys=True, data=True)) == edges
255
+ G = nx.to_networkx_graph(dod, create_using=self.Graph, multigraph_input=mgi)
256
+ assert list(G.edges(keys=True, data=True)) == edges
257
+
258
+ mgi_none_cases = [
259
+ (dodod1, multiple_edge),
260
+ (dodod2, single_edge2),
261
+ (dodod3, single_edge3),
262
+ ]
263
+
264
+ @pytest.mark.parametrize("dod, edges", mgi_none_cases)
265
+ def test_non_multigraph_input_mgi_none(self, dod, edges):
266
+ # test constructor without to_networkx_graph for mgi=None
267
+ G = self.Graph(dod)
268
+ assert list(G.edges(keys=True, data=True)) == edges
269
+
270
+ raise_cases = [dodod2, dodod3, dol]
271
+
272
+ @pytest.mark.parametrize("dod", raise_cases)
273
+ def test_non_multigraph_input_raise(self, dod):
274
+ # cases where NetworkXError is raised
275
+ pytest.raises(nx.NetworkXError, self.Graph, dod, multigraph_input=True)
276
+ pytest.raises(
277
+ nx.NetworkXError,
278
+ nx.to_networkx_graph,
279
+ dod,
280
+ create_using=self.Graph,
281
+ multigraph_input=True,
282
+ )
283
+
284
+ def test_getitem(self):
285
+ G = self.K3
286
+ assert G[0] == {1: {0: {}}, 2: {0: {}}}
287
+ with pytest.raises(KeyError):
288
+ G.__getitem__("j")
289
+ with pytest.raises(TypeError):
290
+ G.__getitem__(["A"])
291
+
292
+ def test_remove_node(self):
293
+ G = self.K3
294
+ G.remove_node(0)
295
+ assert G.adj == {1: {2: {0: {}}}, 2: {1: {0: {}}}}
296
+ with pytest.raises(nx.NetworkXError):
297
+ G.remove_node(-1)
298
+
299
+ def test_add_edge(self):
300
+ G = self.Graph()
301
+ G.add_edge(0, 1)
302
+ assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}}
303
+ G = self.Graph()
304
+ G.add_edge(*(0, 1))
305
+ assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}}
306
+ G = self.Graph()
307
+ with pytest.raises(ValueError):
308
+ G.add_edge(None, "anything")
309
+
310
+ def test_add_edge_conflicting_key(self):
311
+ G = self.Graph()
312
+ G.add_edge(0, 1, key=1)
313
+ G.add_edge(0, 1)
314
+ assert G.number_of_edges() == 2
315
+ G = self.Graph()
316
+ G.add_edges_from([(0, 1, 1, {})])
317
+ G.add_edges_from([(0, 1)])
318
+ assert G.number_of_edges() == 2
319
+
320
+ def test_add_edges_from(self):
321
+ G = self.Graph()
322
+ G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
323
+ assert G.adj == {
324
+ 0: {1: {0: {}, 1: {"weight": 3}}},
325
+ 1: {0: {0: {}, 1: {"weight": 3}}},
326
+ }
327
+ G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
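+ # keys auto-increment, so the two edges just added receive keys 2 and 3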
328
+ assert G.adj == {
329
+ 0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
330
+ 1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
331
+ }
332
+ G = self.Graph()
333
+ edges = [
334
+ (0, 1, {"weight": 3}),
335
+ (0, 1, (("weight", 2),)),
336
+ (0, 1, 5),
337
+ (0, 1, "s"),
338
+ ]
339
+ G.add_edges_from(edges)
340
+ keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}}
341
+ assert G._adj == {0: {1: keydict}, 1: {0: keydict}}
342
+
343
+ # too few in tuple
344
+ with pytest.raises(nx.NetworkXError):
345
+ G.add_edges_from([(0,)])
346
+ # too many in tuple
347
+ with pytest.raises(nx.NetworkXError):
348
+ G.add_edges_from([(0, 1, 2, 3, 4)])
349
+ # not a tuple
350
+ with pytest.raises(TypeError):
351
+ G.add_edges_from([0])
352
+
353
+ def test_multigraph_add_edges_from_four_tuple_misordered(self):
354
+ """add_edges_from expects 4-tuples of the format (u, v, key, data_dict).
355
+
356
+ Ensure 4-tuples of form (u, v, data_dict, key) raise exception.
357
+ """
358
+ G = nx.MultiGraph()
359
+ with pytest.raises(TypeError):
360
+ # key/data values flipped in 4-tuple
361
+ G.add_edges_from([(0, 1, {"color": "red"}, 0)])
362
+
363
+ def test_remove_edge(self):
364
+ G = self.K3
365
+ G.remove_edge(0, 1)
366
+ assert G.adj == {0: {2: {0: {}}}, 1: {2: {0: {}}}, 2: {0: {0: {}}, 1: {0: {}}}}
367
+
368
+ with pytest.raises(nx.NetworkXError):
369
+ G.remove_edge(-1, 0)
370
+ with pytest.raises(nx.NetworkXError):
371
+ G.remove_edge(0, 2, key=1)
372
+
373
+ def test_remove_edges_from(self):
374
+ G = self.K3.copy()
375
+ G.remove_edges_from([(0, 1)])
376
+ kd = {0: {}}
377
+ assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}}
378
+ G.remove_edges_from([(0, 0)]) # silent fail
379
+ self.K3.add_edge(0, 1)
380
+ G = self.K3.copy()
381
+ G.remove_edges_from(list(G.edges(data=True, keys=True)))
382
+ assert G.adj == {0: {}, 1: {}, 2: {}}
383
+ G = self.K3.copy()
384
+ G.remove_edges_from(list(G.edges(data=False, keys=True)))
385
+ assert G.adj == {0: {}, 1: {}, 2: {}}
386
+ G = self.K3.copy()
387
+ G.remove_edges_from(list(G.edges(data=False, keys=False)))
388
+ assert G.adj == {0: {}, 1: {}, 2: {}}
389
+ G = self.K3.copy()
390
+ G.remove_edges_from([(0, 1, 0), (0, 2, 0, {}), (1, 2)])
391
+ assert G.adj == {0: {1: {1: {}}}, 1: {0: {1: {}}}, 2: {}}
392
+
393
+ def test_remove_multiedge(self):
394
+ G = self.K3
395
+ G.add_edge(0, 1, key="parallel edge")
396
+ G.remove_edge(0, 1, key="parallel edge")
397
+ assert G.adj == {
398
+ 0: {1: {0: {}}, 2: {0: {}}},
399
+ 1: {0: {0: {}}, 2: {0: {}}},
400
+ 2: {0: {0: {}}, 1: {0: {}}},
401
+ }
402
+ G.remove_edge(0, 1)
403
+ kd = {0: {}}
404
+ assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}}
405
+ with pytest.raises(nx.NetworkXError):
406
+ G.remove_edge(-1, 0)
407
+
408
+
409
+ class TestEdgeSubgraph:
410
+ """Unit tests for the :meth:`MultiGraph.edge_subgraph` method."""
411
+
412
+ def setup_method(self):
413
+ # Create a doubly-linked path graph on five nodes.
414
+ G = nx.MultiGraph()
415
+ nx.add_path(G, range(5))
416
+ nx.add_path(G, range(5))
417
+ # Add some node, edge, and graph attributes.
418
+ for i in range(5):
419
+ G.nodes[i]["name"] = f"node{i}"
420
+ G.adj[0][1][0]["name"] = "edge010"
421
+ G.adj[0][1][1]["name"] = "edge011"
422
+ G.adj[3][4][0]["name"] = "edge340"
423
+ G.adj[3][4][1]["name"] = "edge341"
424
+ G.graph["name"] = "graph"
425
+ # Get the subgraph induced by one of the first edges and one of
426
+ # the last edges.
427
+ self.G = G
428
+ self.H = G.edge_subgraph([(0, 1, 0), (3, 4, 1)])
429
+
430
+ def test_correct_nodes(self):
431
+ """Tests that the subgraph has the correct nodes."""
432
+ assert [0, 1, 3, 4] == sorted(self.H.nodes())
433
+
434
+ def test_correct_edges(self):
435
+ """Tests that the subgraph has the correct edges."""
436
+ assert [(0, 1, 0, "edge010"), (3, 4, 1, "edge341")] == sorted(
437
+ self.H.edges(keys=True, data="name")
438
+ )
439
+
440
+ def test_add_node(self):
441
+ """Tests that adding a node to the original graph does not
442
+ affect the nodes of the subgraph.
443
+
444
+ """
445
+ self.G.add_node(5)
446
+ assert [0, 1, 3, 4] == sorted(self.H.nodes())
447
+
448
+ def test_remove_node(self):
449
+ """Tests that removing a node in the original graph does
450
+ affect the nodes of the subgraph.
451
+
452
+ """
453
+ self.G.remove_node(0)
454
+ assert [1, 3, 4] == sorted(self.H.nodes())
455
+
456
+ def test_node_attr_dict(self):
457
+ """Tests that the node attribute dictionary of the two graphs is
458
+ the same object.
459
+
460
+ """
461
+ for v in self.H:
462
+ assert self.G.nodes[v] == self.H.nodes[v]
463
+ # Making a change to G should make a change in H and vice versa.
464
+ self.G.nodes[0]["name"] = "foo"
465
+ assert self.G.nodes[0] == self.H.nodes[0]
466
+ self.H.nodes[1]["name"] = "bar"
467
+ assert self.G.nodes[1] == self.H.nodes[1]
468
+
469
+ def test_edge_attr_dict(self):
470
+ """Tests that the edge attribute dictionary of the two graphs is
471
+ the same object.
472
+
473
+ """
474
+ for u, v, k in self.H.edges(keys=True):
475
+ assert self.G._adj[u][v][k] == self.H._adj[u][v][k]
476
+ # Making a change to G should make a change in H and vice versa.
477
+ self.G._adj[0][1][0]["name"] = "foo"
478
+ assert self.G._adj[0][1][0]["name"] == self.H._adj[0][1][0]["name"]
479
+ self.H._adj[3][4][1]["name"] = "bar"
480
+ assert self.G._adj[3][4][1]["name"] == self.H._adj[3][4][1]["name"]
481
+
482
+ def test_graph_attr_dict(self):
483
+ """Tests that the graph attribute dictionary of the two graphs
484
+ is the same object.
485
+
486
+ """
487
+ assert self.G.graph is self.H.graph
488
+
489
+
490
+ class CustomDictClass(UserDict):
491
+ pass
492
+
493
+
494
+ class MultiGraphSubClass(nx.MultiGraph):
495
+ node_dict_factory = CustomDictClass # type: ignore[assignment]
496
+ node_attr_dict_factory = CustomDictClass # type: ignore[assignment]
497
+ adjlist_outer_dict_factory = CustomDictClass # type: ignore[assignment]
498
+ adjlist_inner_dict_factory = CustomDictClass # type: ignore[assignment]
499
+ edge_key_dict_factory = CustomDictClass # type: ignore[assignment]
500
+ edge_attr_dict_factory = CustomDictClass # type: ignore[assignment]
501
+ graph_attr_dict_factory = CustomDictClass # type: ignore[assignment]
502
+
503
+
504
+ class TestMultiGraphSubclass(TestMultiGraph):
505
+ def setup_method(self):
506
+ self.Graph = MultiGraphSubClass
507
+ # build K3
508
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
509
+ self.k3nodes = [0, 1, 2]
510
+ self.K3 = self.Graph()
511
+ self.K3._adj = self.K3.adjlist_outer_dict_factory(
512
+ {
513
+ 0: self.K3.adjlist_inner_dict_factory(),
514
+ 1: self.K3.adjlist_inner_dict_factory(),
515
+ 2: self.K3.adjlist_inner_dict_factory(),
516
+ }
517
+ )
518
+ self.K3._pred = {0: {}, 1: {}, 2: {}}
519
+ for u in self.k3nodes:
520
+ for v in self.k3nodes:
521
+ if u != v:
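+ # share one key-dict object between both directions, mirroring MultiGraph storage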
522
+ d = {0: {}}
523
+ self.K3._adj[u][v] = d
524
+ self.K3._adj[v][u] = d
525
+ self.K3._node = self.K3.node_dict_factory()
526
+ self.K3._node[0] = self.K3.node_attr_dict_factory()
527
+ self.K3._node[1] = self.K3.node_attr_dict_factory()
528
+ self.K3._node[2] = self.K3.node_attr_dict_factory()
venv/lib/python3.10/site-packages/networkx/classes/tests/test_reportviews.py ADDED
@@ -0,0 +1,1427 @@
1
+ import pickle
2
+ from copy import deepcopy
3
+
4
+ import pytest
5
+
6
+ import networkx as nx
7
+ from networkx.classes import reportviews as rv
8
+ from networkx.classes.reportviews import NodeDataView
9
+
10
+
11
+ # Nodes
12
+ class TestNodeView:
13
+ @classmethod
14
+ def setup_class(cls):
15
+ cls.G = nx.path_graph(9)
16
+ cls.nv = cls.G.nodes # NodeView(G)
17
+
18
+ def test_pickle(self):
19
+ import pickle
20
+
21
+ nv = self.nv
22
+ pnv = pickle.loads(pickle.dumps(nv, -1))
23
+ assert nv == pnv
24
+ assert nv.__slots__ == pnv.__slots__
25
+
26
+ def test_str(self):
27
+ assert str(self.nv) == "[0, 1, 2, 3, 4, 5, 6, 7, 8]"
28
+
29
+ def test_repr(self):
30
+ assert repr(self.nv) == "NodeView((0, 1, 2, 3, 4, 5, 6, 7, 8))"
31
+
32
+ def test_contains(self):
33
+ G = self.G.copy()
34
+ nv = G.nodes
35
+ assert 7 in nv
36
+ assert 9 not in nv
37
+ G.remove_node(7)
38
+ G.add_node(9)
39
+ assert 7 not in nv
40
+ assert 9 in nv
41
+
42
+ def test_getitem(self):
43
+ G = self.G.copy()
44
+ nv = G.nodes
45
+ G.nodes[3]["foo"] = "bar"
46
+ assert nv[7] == {}
47
+ assert nv[3] == {"foo": "bar"}
48
+ # slicing
49
+ with pytest.raises(nx.NetworkXError):
50
+ G.nodes[0:5]
51
+
52
+ def test_iter(self):
53
+ nv = self.nv
54
+ for i, n in enumerate(nv):
55
+ assert i == n
56
+ inv = iter(nv)
57
+ assert next(inv) == 0
58
+ assert iter(nv) != nv
59
+ assert iter(inv) == inv
60
+ inv2 = iter(nv)
61
+ next(inv2)
62
+ assert list(inv) == list(inv2)
63
+ # odd case where NodeView calls NodeDataView with data=False
64
+ nnv = nv(data=False)
65
+ for i, n in enumerate(nnv):
66
+ assert i == n
67
+
68
+ def test_call(self):
69
+ nodes = self.nv
70
+ assert nodes is nodes()
71
+ assert nodes is not nodes(data=True)
72
+ assert nodes is not nodes(data="weight")
73
+
74
+
75
+ class TestNodeDataView:
76
+ @classmethod
77
+ def setup_class(cls):
78
+ cls.G = nx.path_graph(9)
79
+ cls.nv = NodeDataView(cls.G)
80
+ cls.ndv = cls.G.nodes.data(True)
81
+ cls.nwv = cls.G.nodes.data("foo")
82
+
83
+ def test_viewtype(self):
84
+ nv = self.G.nodes
85
+ ndvfalse = nv.data(False)
86
+ assert nv is ndvfalse
87
+ assert nv is not self.ndv
88
+
89
+ def test_pickle(self):
90
+ import pickle
91
+
92
+ nv = self.nv
93
+ pnv = pickle.loads(pickle.dumps(nv, -1))
94
+ assert nv == pnv
95
+ assert nv.__slots__ == pnv.__slots__
96
+
97
+ def test_str(self):
98
+ msg = str([(n, {}) for n in range(9)])
99
+ assert str(self.ndv) == msg
100
+
101
+ def test_repr(self):
102
+ expected = "NodeDataView((0, 1, 2, 3, 4, 5, 6, 7, 8))"
103
+ assert repr(self.nv) == expected
104
+ expected = (
105
+ "NodeDataView({0: {}, 1: {}, 2: {}, 3: {}, "
106
+ + "4: {}, 5: {}, 6: {}, 7: {}, 8: {}})"
107
+ )
108
+ assert repr(self.ndv) == expected
109
+ expected = (
110
+ "NodeDataView({0: None, 1: None, 2: None, 3: None, 4: None, "
111
+ + "5: None, 6: None, 7: None, 8: None}, data='foo')"
112
+ )
113
+ assert repr(self.nwv) == expected
114
+
115
+ def test_contains(self):
116
+ G = self.G.copy()
117
+ nv = G.nodes.data()
118
+ nwv = G.nodes.data("foo")
119
+ G.nodes[3]["foo"] = "bar"
120
+ assert (7, {}) in nv
121
+ assert (3, {"foo": "bar"}) in nv
122
+ assert (3, "bar") in nwv
123
+ assert (7, None) in nwv
124
+ # default
125
+ nwv_def = G.nodes(data="foo", default="biz")
126
+ assert (7, "biz") in nwv_def
127
+ assert (3, "bar") in nwv_def
128
+
129
+ def test_getitem(self):
130
+ G = self.G.copy()
131
+ nv = G.nodes
132
+ G.nodes[3]["foo"] = "bar"
133
+ assert nv[3] == {"foo": "bar"}
134
+ # default
135
+ nwv_def = G.nodes(data="foo", default="biz")
136
+ assert nwv_def[7] == "biz"
137
+ assert nwv_def[3] == "bar"
138
+ # slicing
139
+ with pytest.raises(nx.NetworkXError):
140
+ G.nodes.data()[0:5]
141
+
142
+ def test_iter(self):
143
+ G = self.G.copy()
144
+ nv = G.nodes.data()
145
+ ndv = G.nodes.data(True)
146
+ nwv = G.nodes.data("foo")
147
+ for i, (n, d) in enumerate(nv):
148
+ assert i == n
149
+ assert d == {}
150
+ inv = iter(nv)
151
+ assert next(inv) == (0, {})
152
+ G.nodes[3]["foo"] = "bar"
153
+ # default
154
+ for n, d in nv:
155
+ if n == 3:
156
+ assert d == {"foo": "bar"}
157
+ else:
158
+ assert d == {}
159
+ # data=True
160
+ for n, d in ndv:
161
+ if n == 3:
162
+ assert d == {"foo": "bar"}
163
+ else:
164
+ assert d == {}
165
+ # data='foo'
166
+ for n, d in nwv:
167
+ if n == 3:
168
+ assert d == "bar"
169
+ else:
170
+ assert d is None
171
+ # data='foo', default=1
172
+ for n, d in G.nodes.data("foo", default=1):
173
+ if n == 3:
174
+ assert d == "bar"
175
+ else:
176
+ assert d == 1
177
+
178
+
179
+ def test_nodedataview_unhashable():
180
+ G = nx.path_graph(9)
181
+ G.nodes[3]["foo"] = "bar"
182
+ nvs = [G.nodes.data()]
183
+ nvs.append(G.nodes.data(True))
184
+ H = G.copy()
185
+ H.nodes[4]["foo"] = {1, 2, 3}
186
+ nvs.append(H.nodes.data(True))
187
+ # raise unhashable
188
+ for nv in nvs:
189
+ pytest.raises(TypeError, set, nv)
190
+ pytest.raises(TypeError, eval, "nv | nv", locals())
191
+ # no raise... hashable
192
+ Gn = G.nodes.data(False)
193
+ set(Gn)
194
+ Gn | Gn
195
+ Gn = G.nodes.data("foo")
196
+ set(Gn)
197
+ Gn | Gn
198
+
199
+
200
+ class TestNodeViewSetOps:
201
+ @classmethod
202
+ def setup_class(cls):
203
+ cls.G = nx.path_graph(9)
204
+ cls.G.nodes[3]["foo"] = "bar"
205
+ cls.nv = cls.G.nodes
206
+
207
+ def n_its(self, nodes):
208
+ return set(nodes)
209
+
210
+ def test_len(self):
211
+ G = self.G.copy()
212
+ nv = G.nodes
213
+ assert len(nv) == 9
214
+ G.remove_node(7)
215
+ assert len(nv) == 8
216
+ G.add_node(9)
217
+ assert len(nv) == 9
218
+
219
+ def test_and(self):
220
+ # print("G & H nodes:", gnv & hnv)
221
+ nv = self.nv
222
+ some_nodes = self.n_its(range(5, 12))
223
+ assert nv & some_nodes == self.n_its(range(5, 9))
224
+ assert some_nodes & nv == self.n_its(range(5, 9))
225
+
226
+ def test_or(self):
227
+ # print("G | H nodes:", gnv | hnv)
228
+ nv = self.nv
229
+ some_nodes = self.n_its(range(5, 12))
230
+ assert nv | some_nodes == self.n_its(range(12))
231
+ assert some_nodes | nv == self.n_its(range(12))
232
+
233
+ def test_xor(self):
234
+ # print("G ^ H nodes:", gnv ^ hnv)
235
+ nv = self.nv
236
+ some_nodes = self.n_its(range(5, 12))
237
+ nodes = {0, 1, 2, 3, 4, 9, 10, 11}
238
+ assert nv ^ some_nodes == self.n_its(nodes)
239
+ assert some_nodes ^ nv == self.n_its(nodes)
240
+
241
+ def test_sub(self):
242
+ # print("G - H nodes:", gnv - hnv)
243
+ nv = self.nv
244
+ some_nodes = self.n_its(range(5, 12))
245
+ assert nv - some_nodes == self.n_its(range(5))
246
+ assert some_nodes - nv == self.n_its(range(9, 12))
247
+
248
+
249
+ class TestNodeDataViewSetOps(TestNodeViewSetOps):
250
+ @classmethod
251
+ def setup_class(cls):
252
+ cls.G = nx.path_graph(9)
253
+ cls.G.nodes[3]["foo"] = "bar"
254
+ cls.nv = cls.G.nodes.data("foo")
255
+
256
+ def n_its(self, nodes):
257
+ return {(node, "bar" if node == 3 else None) for node in nodes}
258
+
259
+
260
+ class TestNodeDataViewDefaultSetOps(TestNodeDataViewSetOps):
261
+ @classmethod
262
+ def setup_class(cls):
263
+ cls.G = nx.path_graph(9)
264
+ cls.G.nodes[3]["foo"] = "bar"
265
+ cls.nv = cls.G.nodes.data("foo", default=1)
266
+
267
+ def n_its(self, nodes):
268
+ return {(node, "bar" if node == 3 else 1) for node in nodes}
269
+
270
+
271
+ # Edges Data View
272
+ class TestEdgeDataView:
273
+ @classmethod
274
+ def setup_class(cls):
275
+ cls.G = nx.path_graph(9)
276
+ cls.eview = nx.reportviews.EdgeView
277
+
278
+ def test_pickle(self):
279
+ import pickle
280
+
281
+ ev = self.eview(self.G)(data=True)
282
+ pev = pickle.loads(pickle.dumps(ev, -1))
283
+ assert list(ev) == list(pev)
284
+ assert ev.__slots__ == pev.__slots__
285
+
286
+ def modify_edge(self, G, e, **kwds):
287
+ G._adj[e[0]][e[1]].update(kwds)
288
+
289
+ def test_str(self):
290
+ ev = self.eview(self.G)(data=True)
291
+ rep = str([(n, n + 1, {}) for n in range(8)])
292
+ assert str(ev) == rep
293
+
294
+ def test_repr(self):
295
+ ev = self.eview(self.G)(data=True)
296
+ rep = (
297
+ "EdgeDataView([(0, 1, {}), (1, 2, {}), "
298
+ + "(2, 3, {}), (3, 4, {}), "
299
+ + "(4, 5, {}), (5, 6, {}), "
300
+ + "(6, 7, {}), (7, 8, {})])"
301
+ )
302
+ assert repr(ev) == rep
303
+
304
+ def test_iterdata(self):
305
+ G = self.G.copy()
306
+ evr = self.eview(G)
307
+ ev = evr(data=True)
308
+ ev_def = evr(data="foo", default=1)
309
+
310
+ for u, v, d in ev:
311
+ pass
312
+ assert d == {}
313
+
314
+ for u, v, wt in ev_def:
315
+ pass
316
+ assert wt == 1
317
+
318
+ self.modify_edge(G, (2, 3), foo="bar")
319
+ for e in ev:
320
+ assert len(e) == 3
321
+ if set(e[:2]) == {2, 3}:
322
+ assert e[2] == {"foo": "bar"}
323
+ checked = True
324
+ else:
325
+ assert e[2] == {}
326
+ assert checked
327
+
328
+ for e in ev_def:
329
+ assert len(e) == 3
330
+ if set(e[:2]) == {2, 3}:
331
+ assert e[2] == "bar"
332
+ checked_wt = True
333
+ else:
334
+ assert e[2] == 1
335
+ assert checked_wt
336
+
337
+ def test_iter(self):
338
+ evr = self.eview(self.G)
339
+ ev = evr()
340
+ for u, v in ev:
341
+ pass
342
+ iev = iter(ev)
343
+ assert next(iev) == (0, 1)
344
+ assert iter(ev) != ev
345
+ assert iter(iev) == iev
346
+
347
+ def test_contains(self):
348
+ evr = self.eview(self.G)
349
+ ev = evr()
350
+ if self.G.is_directed():
351
+ assert (1, 2) in ev and (2, 1) not in ev
352
+ else:
353
+ assert (1, 2) in ev and (2, 1) in ev
354
+ assert (1, 4) not in ev
355
+ assert (1, 90) not in ev
356
+ assert (90, 1) not in ev
357
+
358
+ def test_contains_with_nbunch(self):
359
+ evr = self.eview(self.G)
360
+ ev = evr(nbunch=[0, 2])
361
+ if self.G.is_directed():
362
+ assert (0, 1) in ev
363
+ assert (1, 2) not in ev
364
+ assert (2, 3) in ev
365
+ else:
366
+ assert (0, 1) in ev
367
+ assert (1, 2) in ev
368
+ assert (2, 3) in ev
369
+ assert (3, 4) not in ev
370
+ assert (4, 5) not in ev
371
+ assert (5, 6) not in ev
372
+ assert (7, 8) not in ev
373
+ assert (8, 9) not in ev
374
+
375
+ def test_len(self):
376
+ evr = self.eview(self.G)
377
+ ev = evr(data="foo")
378
+ assert len(ev) == 8
379
+ assert len(evr(1)) == 2
380
+ assert len(evr([1, 2, 3])) == 4
381
+
382
+ assert len(self.G.edges(1)) == 2
383
+ assert len(self.G.edges()) == 8
384
+ assert len(self.G.edges) == 8
385
+
386
+ H = self.G.copy()
387
+ H.add_edge(1, 1)
388
+ assert len(H.edges(1)) == 3
389
+ assert len(H.edges()) == 9
390
+ assert len(H.edges) == 9
391
+
392
+
393
+ class TestOutEdgeDataView(TestEdgeDataView):
394
+ @classmethod
395
+ def setup_class(cls):
396
+ cls.G = nx.path_graph(9, create_using=nx.DiGraph())
397
+ cls.eview = nx.reportviews.OutEdgeView
398
+
399
+ def test_repr(self):
400
+ ev = self.eview(self.G)(data=True)
401
+ rep = (
402
+ "OutEdgeDataView([(0, 1, {}), (1, 2, {}), "
403
+ + "(2, 3, {}), (3, 4, {}), "
404
+ + "(4, 5, {}), (5, 6, {}), "
405
+ + "(6, 7, {}), (7, 8, {})])"
406
+ )
407
+ assert repr(ev) == rep
408
+
409
+ def test_len(self):
410
+ evr = self.eview(self.G)
411
+ ev = evr(data="foo")
412
+ assert len(ev) == 8
413
+ assert len(evr(1)) == 1
414
+ assert len(evr([1, 2, 3])) == 3
415
+
416
+ assert len(self.G.edges(1)) == 1
417
+ assert len(self.G.edges()) == 8
418
+ assert len(self.G.edges) == 8
419
+
420
+ H = self.G.copy()
421
+ H.add_edge(1, 1)
422
+ assert len(H.edges(1)) == 2
423
+ assert len(H.edges()) == 9
424
+ assert len(H.edges) == 9
425
+
426
+ def test_contains_with_nbunch(self):
427
+ evr = self.eview(self.G)
428
+ ev = evr(nbunch=[0, 2])
429
+ assert (0, 1) in ev
430
+ assert (1, 2) not in ev
431
+ assert (2, 3) in ev
432
+ assert (3, 4) not in ev
433
+ assert (4, 5) not in ev
434
+ assert (5, 6) not in ev
435
+ assert (7, 8) not in ev
436
+ assert (8, 9) not in ev
437
+
438
+
439
+ class TestInEdgeDataView(TestOutEdgeDataView):
440
+ @classmethod
441
+ def setup_class(cls):
442
+ cls.G = nx.path_graph(9, create_using=nx.DiGraph())
443
+ cls.eview = nx.reportviews.InEdgeView
444
+
445
+ def test_repr(self):
446
+ ev = self.eview(self.G)(data=True)
447
+ rep = (
448
+ "InEdgeDataView([(0, 1, {}), (1, 2, {}), "
449
+ + "(2, 3, {}), (3, 4, {}), "
450
+ + "(4, 5, {}), (5, 6, {}), "
451
+ + "(6, 7, {}), (7, 8, {})])"
452
+ )
453
+ assert repr(ev) == rep
454
+
455
+ def test_contains_with_nbunch(self):
456
+ evr = self.eview(self.G)
457
+ ev = evr(nbunch=[0, 2])
458
+ assert (0, 1) not in ev
459
+ assert (1, 2) in ev
460
+ assert (2, 3) not in ev
461
+ assert (3, 4) not in ev
462
+ assert (4, 5) not in ev
463
+ assert (5, 6) not in ev
464
+ assert (7, 8) not in ev
465
+ assert (8, 9) not in ev
466
+
467
+
468
+ class TestMultiEdgeDataView(TestEdgeDataView):
469
+ @classmethod
470
+ def setup_class(cls):
471
+ cls.G = nx.path_graph(9, create_using=nx.MultiGraph())
472
+ cls.eview = nx.reportviews.MultiEdgeView
473
+
474
+ def modify_edge(self, G, e, **kwds):
475
+ G._adj[e[0]][e[1]][0].update(kwds)
476
+
477
+ def test_repr(self):
478
+ ev = self.eview(self.G)(data=True)
479
+ rep = (
480
+ "MultiEdgeDataView([(0, 1, {}), (1, 2, {}), "
481
+ + "(2, 3, {}), (3, 4, {}), "
482
+ + "(4, 5, {}), (5, 6, {}), "
483
+ + "(6, 7, {}), (7, 8, {})])"
484
+ )
485
+ assert repr(ev) == rep
486
+
487
+ def test_contains_with_nbunch(self):
488
+ evr = self.eview(self.G)
489
+ ev = evr(nbunch=[0, 2])
490
+ assert (0, 1) in ev
491
+ assert (1, 2) in ev
492
+ assert (2, 3) in ev
493
+ assert (3, 4) not in ev
494
+ assert (4, 5) not in ev
495
+ assert (5, 6) not in ev
496
+ assert (7, 8) not in ev
497
+ assert (8, 9) not in ev
498
+
499
+
500
+ class TestOutMultiEdgeDataView(TestOutEdgeDataView):
501
+ @classmethod
502
+ def setup_class(cls):
503
+ cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph())
504
+ cls.eview = nx.reportviews.OutMultiEdgeView
505
+
506
+ def modify_edge(self, G, e, **kwds):
507
+ G._adj[e[0]][e[1]][0].update(kwds)
508
+
509
+ def test_repr(self):
510
+ ev = self.eview(self.G)(data=True)
511
+ rep = (
512
+ "OutMultiEdgeDataView([(0, 1, {}), (1, 2, {}), "
513
+ + "(2, 3, {}), (3, 4, {}), "
514
+ + "(4, 5, {}), (5, 6, {}), "
515
+ + "(6, 7, {}), (7, 8, {})])"
516
+ )
517
+ assert repr(ev) == rep
518
+
519
+ def test_contains_with_nbunch(self):
520
+ evr = self.eview(self.G)
521
+ ev = evr(nbunch=[0, 2])
522
+ assert (0, 1) in ev
523
+ assert (1, 2) not in ev
524
+ assert (2, 3) in ev
525
+ assert (3, 4) not in ev
526
+ assert (4, 5) not in ev
527
+ assert (5, 6) not in ev
528
+ assert (7, 8) not in ev
529
+ assert (8, 9) not in ev
530
+
531
+
532
+ class TestInMultiEdgeDataView(TestOutMultiEdgeDataView):
533
+ @classmethod
534
+ def setup_class(cls):
535
+ cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph())
536
+ cls.eview = nx.reportviews.InMultiEdgeView
537
+
538
+ def test_repr(self):
539
+ ev = self.eview(self.G)(data=True)
540
+ rep = (
541
+ "InMultiEdgeDataView([(0, 1, {}), (1, 2, {}), "
542
+ + "(2, 3, {}), (3, 4, {}), "
543
+ + "(4, 5, {}), (5, 6, {}), "
544
+ + "(6, 7, {}), (7, 8, {})])"
545
+ )
546
+ assert repr(ev) == rep
547
+
548
+ def test_contains_with_nbunch(self):
549
+ evr = self.eview(self.G)
550
+ ev = evr(nbunch=[0, 2])
551
+ assert (0, 1) not in ev
552
+ assert (1, 2) in ev
553
+ assert (2, 3) not in ev
554
+ assert (3, 4) not in ev
555
+ assert (4, 5) not in ev
556
+ assert (5, 6) not in ev
557
+ assert (7, 8) not in ev
558
+ assert (8, 9) not in ev
559
+
560
+
561
+ # Edge Views
562
+ class TestEdgeView:
563
+ @classmethod
564
+ def setup_class(cls):
565
+ cls.G = nx.path_graph(9)
566
+ cls.eview = nx.reportviews.EdgeView
567
+
568
+ def test_pickle(self):
569
+ import pickle
570
+
571
+ ev = self.eview(self.G)
572
+ pev = pickle.loads(pickle.dumps(ev, -1))
573
+ assert ev == pev
574
+ assert ev.__slots__ == pev.__slots__
575
+
576
+ def modify_edge(self, G, e, **kwds):
577
+ G._adj[e[0]][e[1]].update(kwds)
578
+
579
+ def test_str(self):
580
+ ev = self.eview(self.G)
581
+ rep = str([(n, n + 1) for n in range(8)])
582
+ assert str(ev) == rep
583
+
584
+ def test_repr(self):
585
+ ev = self.eview(self.G)
586
+ rep = (
587
+ "EdgeView([(0, 1), (1, 2), (2, 3), (3, 4), "
588
+ + "(4, 5), (5, 6), (6, 7), (7, 8)])"
589
+ )
590
+ assert repr(ev) == rep
591
+
592
+ def test_getitem(self):
593
+ G = self.G.copy()
594
+ ev = G.edges
595
+ G.edges[0, 1]["foo"] = "bar"
596
+ assert ev[0, 1] == {"foo": "bar"}
597
+
598
+ # slicing
599
+ with pytest.raises(nx.NetworkXError, match=".*does not support slicing"):
600
+ G.edges[0:5]
601
+
602
+ # Invalid edge
603
+ with pytest.raises(KeyError, match=r".*edge.*is not in the graph."):
604
+ G.edges[0, 9]
605
+
606
+ def test_call(self):
607
+ ev = self.eview(self.G)
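+ # ev() and ev(data=False) return the view itself; data=True or an nbunch produce a new data view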
608
+ assert id(ev) == id(ev())
609
+ assert id(ev) == id(ev(data=False))
610
+ assert id(ev) != id(ev(data=True))
611
+ assert id(ev) != id(ev(nbunch=1))
612
+
613
+ def test_data(self):
614
+ ev = self.eview(self.G)
615
+ assert id(ev) != id(ev.data())
616
+ assert id(ev) == id(ev.data(data=False))
617
+ assert id(ev) != id(ev.data(data=True))
618
+ assert id(ev) != id(ev.data(nbunch=1))
619
+
620
+ def test_iter(self):
621
+ ev = self.eview(self.G)
622
+ for u, v in ev:
623
+ pass
624
+ iev = iter(ev)
625
+ assert next(iev) == (0, 1)
626
+ assert iter(ev) != ev
627
+ assert iter(iev) == iev
628
+
629
+ def test_contains(self):
630
+ ev = self.eview(self.G)
631
+ edv = ev()
632
+ if self.G.is_directed():
633
+ assert (1, 2) in ev and (2, 1) not in ev
634
+ assert (1, 2) in edv and (2, 1) not in edv
635
+ else:
636
+ assert (1, 2) in ev and (2, 1) in ev
637
+ assert (1, 2) in edv and (2, 1) in edv
638
+ assert (1, 4) not in ev
639
+ assert (1, 4) not in edv
640
+ # edge not in graph
641
+ assert (1, 90) not in ev
642
+ assert (90, 1) not in ev
643
+ assert (1, 90) not in edv
644
+ assert (90, 1) not in edv
645
+
646
+ def test_contains_with_nbunch(self):
647
+ ev = self.eview(self.G)
648
+ evn = ev(nbunch=[0, 2])
649
+ assert (0, 1) in evn
650
+ assert (1, 2) in evn
651
+ assert (2, 3) in evn
652
+ assert (3, 4) not in evn
653
+ assert (4, 5) not in evn
654
+ assert (5, 6) not in evn
655
+ assert (7, 8) not in evn
656
+ assert (8, 9) not in evn
657
+
658
+ def test_len(self):
659
+ ev = self.eview(self.G)
660
+ num_ed = 9 if self.G.is_multigraph() else 8
661
+ assert len(ev) == num_ed
662
+
663
+ H = self.G.copy()
664
+ H.add_edge(1, 1)
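+ # adjust for the extra parallel edge (multigraphs) and for G.edges counting only out-edges (directed)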
665
+ assert len(H.edges(1)) == 3 + H.is_multigraph() - H.is_directed()
666
+ assert len(H.edges()) == num_ed + 1
667
+ assert len(H.edges) == num_ed + 1
668
+
669
+ def test_and(self):
670
+ # print("G & H edges:", gnv & hnv)
671
+ ev = self.eview(self.G)
672
+ some_edges = {(0, 1), (1, 0), (0, 2)}
673
+ if self.G.is_directed():
674
+ assert some_edges & ev == {(0, 1)}
675
+ assert ev & some_edges == {(0, 1)}
676
+ else:
677
+ assert ev & some_edges == {(0, 1), (1, 0)}
678
+ assert some_edges & ev == {(0, 1), (1, 0)}
679
+ return
680
+
681
+ def test_or(self):
682
+ # print("G | H edges:", gnv | hnv)
683
+ ev = self.eview(self.G)
684
+ some_edges = {(0, 1), (1, 0), (0, 2)}
685
+ result1 = {(n, n + 1) for n in range(8)}
686
+ result1.update(some_edges)
687
+ result2 = {(n + 1, n) for n in range(8)}
688
+ result2.update(some_edges)
689
+ assert (ev | some_edges) in (result1, result2)
690
+ assert (some_edges | ev) in (result1, result2)
691
+
692
+ def test_xor(self):
693
+ # print("G ^ H edges:", gnv ^ hnv)
694
+ ev = self.eview(self.G)
695
+ some_edges = {(0, 1), (1, 0), (0, 2)}
696
+ if self.G.is_directed():
697
+ result = {(n, n + 1) for n in range(1, 8)}
698
+ result.update({(1, 0), (0, 2)})
699
+ assert ev ^ some_edges == result
700
+ else:
701
+ result = {(n, n + 1) for n in range(1, 8)}
702
+ result.update({(0, 2)})
703
+ assert ev ^ some_edges == result
704
+ return
705
+
706
+ def test_sub(self):
707
+ # print("G - H edges:", gnv - hnv)
708
+ ev = self.eview(self.G)
709
+ some_edges = {(0, 1), (1, 0), (0, 2)}
710
+ result = {(n, n + 1) for n in range(8)}
711
+ result.remove((0, 1))
712
+ assert ev - some_edges == result
713
+
714
+
715
+ class TestOutEdgeView(TestEdgeView):
716
+ @classmethod
717
+ def setup_class(cls):
718
+ cls.G = nx.path_graph(9, nx.DiGraph())
719
+ cls.eview = nx.reportviews.OutEdgeView
720
+
721
+ def test_repr(self):
722
+ ev = self.eview(self.G)
723
+ rep = (
724
+ "OutEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), "
725
+ + "(4, 5), (5, 6), (6, 7), (7, 8)])"
726
+ )
727
+ assert repr(ev) == rep
728
+
729
+ def test_contains_with_nbunch(self):
730
+ ev = self.eview(self.G)
731
+ evn = ev(nbunch=[0, 2])
732
+ assert (0, 1) in evn
733
+ assert (1, 2) not in evn
734
+ assert (2, 3) in evn
735
+ assert (3, 4) not in evn
736
+ assert (4, 5) not in evn
737
+ assert (5, 6) not in evn
738
+ assert (7, 8) not in evn
739
+ assert (8, 9) not in evn
740
+
741
+
742
+ class TestInEdgeView(TestEdgeView):
743
+ @classmethod
744
+ def setup_class(cls):
745
+ cls.G = nx.path_graph(9, nx.DiGraph())
746
+ cls.eview = nx.reportviews.InEdgeView
747
+
748
+ def test_repr(self):
749
+ ev = self.eview(self.G)
750
+ rep = (
751
+ "InEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), "
752
+ + "(4, 5), (5, 6), (6, 7), (7, 8)])"
753
+ )
754
+ assert repr(ev) == rep
755
+
756
+ def test_contains_with_nbunch(self):
757
+ ev = self.eview(self.G)
758
+ evn = ev(nbunch=[0, 2])
759
+ assert (0, 1) not in evn
760
+ assert (1, 2) in evn
761
+ assert (2, 3) not in evn
762
+ assert (3, 4) not in evn
763
+ assert (4, 5) not in evn
764
+ assert (5, 6) not in evn
765
+ assert (7, 8) not in evn
766
+ assert (8, 9) not in evn
767
+
768
+
769
+ class TestMultiEdgeView(TestEdgeView):
770
+ @classmethod
771
+ def setup_class(cls):
772
+ cls.G = nx.path_graph(9, nx.MultiGraph())
773
+ cls.G.add_edge(1, 2, key=3, foo="bar")
774
+ cls.eview = nx.reportviews.MultiEdgeView
775
+
776
+ def modify_edge(self, G, e, **kwds):
777
+ if len(e) == 2:
778
+ e = e + (0,)
779
+ G._adj[e[0]][e[1]][e[2]].update(kwds)
780
+
781
+ def test_str(self):
782
+ ev = self.eview(self.G)
783
+ replist = [(n, n + 1, 0) for n in range(8)]
784
+ replist.insert(2, (1, 2, 3))
785
+ rep = str(replist)
786
+ assert str(ev) == rep
787
+
788
+ def test_getitem(self):
789
+ G = self.G.copy()
790
+ ev = G.edges
791
+ G.edges[0, 1, 0]["foo"] = "bar"
792
+ assert ev[0, 1, 0] == {"foo": "bar"}
793
+
794
+ # slicing
795
+ with pytest.raises(nx.NetworkXError):
796
+ G.edges[0:5]
797
+
798
+ def test_repr(self):
799
+ ev = self.eview(self.G)
800
+ rep = (
801
+ "MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), "
802
+ + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])"
803
+ )
804
+ assert repr(ev) == rep
805
+
806
+ def test_call(self):
807
+ ev = self.eview(self.G)
808
+ assert id(ev) == id(ev(keys=True))
809
+ assert id(ev) == id(ev(data=False, keys=True))
810
+ assert id(ev) != id(ev(keys=False))
811
+ assert id(ev) != id(ev(data=True))
812
+ assert id(ev) != id(ev(nbunch=1))
813
+
814
+ def test_data(self):
815
+ ev = self.eview(self.G)
816
+ assert id(ev) != id(ev.data())
817
+ assert id(ev) == id(ev.data(data=False, keys=True))
818
+ assert id(ev) != id(ev.data(keys=False))
819
+ assert id(ev) != id(ev.data(data=True))
820
+ assert id(ev) != id(ev.data(nbunch=1))
821
+
822
+ def test_iter(self):
823
+ ev = self.eview(self.G)
824
+ for u, v, k in ev:
825
+ pass
826
+ iev = iter(ev)
827
+ assert next(iev) == (0, 1, 0)
828
+ assert iter(ev) != ev
829
+ assert iter(iev) == iev
830
+
831
+ def test_iterkeys(self):
832
+ G = self.G
833
+ evr = self.eview(G)
834
+ ev = evr(keys=True)
835
+ for u, v, k in ev:
836
+ pass
837
+ assert k == 0
838
+ ev = evr(keys=True, data="foo", default=1)
839
+ for u, v, k, wt in ev:
840
+ pass
841
+ assert wt == 1
842
+
843
+ self.modify_edge(G, (2, 3, 0), foo="bar")
844
+ ev = evr(keys=True, data=True)
845
+ for e in ev:
846
+ assert len(e) == 4
847
+ print("edge:", e)
848
+ if set(e[:2]) == {2, 3}:
849
+ print(self.G._adj[2][3])
850
+ assert e[2] == 0
851
+ assert e[3] == {"foo": "bar"}
852
+ checked = True
853
+ elif set(e[:3]) == {1, 2, 3}:
854
+ assert e[2] == 3
855
+ assert e[3] == {"foo": "bar"}
856
+ checked_multi = True
857
+ else:
858
+ assert e[2] == 0
859
+ assert e[3] == {}
860
+ assert checked
861
+ assert checked_multi
862
+ ev = evr(keys=True, data="foo", default=1)
863
+ for e in ev:
864
+ if set(e[:2]) == {1, 2} and e[2] == 3:
865
+ assert e[3] == "bar"
866
+ if set(e[:2]) == {1, 2} and e[2] == 0:
867
+ assert e[3] == 1
868
+ if set(e[:2]) == {2, 3}:
869
+ assert e[2] == 0
870
+ assert e[3] == "bar"
871
+ assert len(e) == 4
872
+ checked_wt = True
873
+ assert checked_wt
874
+ ev = evr(keys=True)
875
+ for e in ev:
876
+ assert len(e) == 3
877
+ elist = sorted([(i, i + 1, 0) for i in range(8)] + [(1, 2, 3)])
878
+ assert sorted(ev) == elist
879
+ # test that the keyword arguments are passed correctly
880
+ ev = evr((1, 2), "foo", keys=True, default=1)
881
+ with pytest.raises(TypeError):
882
+ evr((1, 2), "foo", True, 1)
883
+ with pytest.raises(TypeError):
884
+ evr((1, 2), "foo", True, default=1)
885
+ for e in ev:
886
+ if set(e[:2]) == {1, 2}:
887
+ assert e[2] in {0, 3}
888
+ if e[2] == 3:
889
+ assert e[3] == "bar"
890
+ else: # e[2] == 0
891
+ assert e[3] == 1
892
+ if G.is_directed():
893
+ assert len(list(ev)) == 3
894
+ else:
895
+ assert len(list(ev)) == 4
896
+
897
+ def test_or(self):
898
+ # print("G | H edges:", gnv | hnv)
899
+ ev = self.eview(self.G)
900
+ some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
901
+ result = {(n, n + 1, 0) for n in range(8)}
902
+ result.update(some_edges)
903
+ result.update({(1, 2, 3)})
904
+ assert ev | some_edges == result
905
+ assert some_edges | ev == result
906
+
907
+ def test_sub(self):
908
+ # print("G - H edges:", gnv - hnv)
909
+ ev = self.eview(self.G)
910
+ some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
911
+ result = {(n, n + 1, 0) for n in range(8)}
912
+ result.remove((0, 1, 0))
913
+ result.update({(1, 2, 3)})
914
+ assert ev - some_edges == result
915
+ assert some_edges - ev, result
916
+
917
+ def test_xor(self):
918
+ # print("G ^ H edges:", gnv ^ hnv)
919
+ ev = self.eview(self.G)
920
+ some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
921
+ if self.G.is_directed():
922
+ result = {(n, n + 1, 0) for n in range(1, 8)}
923
+ result.update({(1, 0, 0), (0, 2, 0), (1, 2, 3)})
924
+ assert ev ^ some_edges == result
925
+ assert some_edges ^ ev == result
926
+ else:
927
+ result = {(n, n + 1, 0) for n in range(1, 8)}
928
+ result.update({(0, 2, 0), (1, 2, 3)})
929
+ assert ev ^ some_edges == result
930
+ assert some_edges ^ ev == result
931
+
932
+ def test_and(self):
933
+ # print("G & H edges:", gnv & hnv)
934
+ ev = self.eview(self.G)
935
+ some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
936
+ if self.G.is_directed():
937
+ assert ev & some_edges == {(0, 1, 0)}
938
+ assert some_edges & ev == {(0, 1, 0)}
939
+ else:
940
+ assert ev & some_edges == {(0, 1, 0), (1, 0, 0)}
941
+ assert some_edges & ev == {(0, 1, 0), (1, 0, 0)}
942
+
943
+ def test_contains_with_nbunch(self):
944
+ ev = self.eview(self.G)
945
+ evn = ev(nbunch=[0, 2])
946
+ assert (0, 1) in evn
947
+ assert (1, 2) in evn
948
+ assert (2, 3) in evn
949
+ assert (3, 4) not in evn
950
+ assert (4, 5) not in evn
951
+ assert (5, 6) not in evn
952
+ assert (7, 8) not in evn
953
+ assert (8, 9) not in evn
954
+
955
+
956
+ class TestOutMultiEdgeView(TestMultiEdgeView):
957
+ @classmethod
958
+ def setup_class(cls):
959
+ cls.G = nx.path_graph(9, nx.MultiDiGraph())
960
+ cls.G.add_edge(1, 2, key=3, foo="bar")
961
+ cls.eview = nx.reportviews.OutMultiEdgeView
962
+
963
+ def modify_edge(self, G, e, **kwds):
964
+ if len(e) == 2:
965
+ e = e + (0,)
966
+ G._adj[e[0]][e[1]][e[2]].update(kwds)
967
+
968
+ def test_repr(self):
969
+ ev = self.eview(self.G)
970
+ rep = (
971
+ "OutMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0),"
972
+ + " (3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])"
973
+ )
974
+ assert repr(ev) == rep
975
+
976
+ def test_contains_with_nbunch(self):
977
+ ev = self.eview(self.G)
978
+ evn = ev(nbunch=[0, 2])
979
+ assert (0, 1) in evn
980
+ assert (1, 2) not in evn
981
+ assert (2, 3) in evn
982
+ assert (3, 4) not in evn
983
+ assert (4, 5) not in evn
984
+ assert (5, 6) not in evn
985
+ assert (7, 8) not in evn
986
+ assert (8, 9) not in evn
987
+
988
+
989
+ class TestInMultiEdgeView(TestMultiEdgeView):
990
+ @classmethod
991
+ def setup_class(cls):
992
+ cls.G = nx.path_graph(9, nx.MultiDiGraph())
993
+ cls.G.add_edge(1, 2, key=3, foo="bar")
994
+ cls.eview = nx.reportviews.InMultiEdgeView
995
+
996
+ def modify_edge(self, G, e, **kwds):
997
+ if len(e) == 2:
998
+ e = e + (0,)
999
+ G._adj[e[0]][e[1]][e[2]].update(kwds)
1000
+
1001
+ def test_repr(self):
1002
+ ev = self.eview(self.G)
1003
+ rep = (
1004
+ "InMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), "
1005
+ + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])"
1006
+ )
1007
+ assert repr(ev) == rep
1008
+
1009
+ def test_contains_with_nbunch(self):
1010
+ ev = self.eview(self.G)
1011
+ evn = ev(nbunch=[0, 2])
1012
+ assert (0, 1) not in evn
1013
+ assert (1, 2) in evn
1014
+ assert (2, 3) not in evn
1015
+ assert (3, 4) not in evn
1016
+ assert (4, 5) not in evn
1017
+ assert (5, 6) not in evn
1018
+ assert (7, 8) not in evn
1019
+ assert (8, 9) not in evn
1020
+
1021
+
1022
+ # Degrees
1023
+ class TestDegreeView:
1024
+ GRAPH = nx.Graph
1025
+ dview = nx.reportviews.DegreeView
1026
+
1027
+ @classmethod
1028
+ def setup_class(cls):
1029
+ cls.G = nx.path_graph(6, cls.GRAPH())
1030
+ cls.G.add_edge(1, 3, foo=2)
1031
+ cls.G.add_edge(1, 3, foo=3)
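+ # with a Multi* graph the second call adds a parallel (1, 3) edge instead of updating foo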
1032
+
1033
+ def test_pickle(self):
1034
+ import pickle
1035
+
1036
+ deg = self.G.degree
1037
+ pdeg = pickle.loads(pickle.dumps(deg, -1))
1038
+ assert dict(deg) == dict(pdeg)
1039
+
1040
+ def test_str(self):
1041
+ dv = self.dview(self.G)
1042
+ rep = str([(0, 1), (1, 3), (2, 2), (3, 3), (4, 2), (5, 1)])
1043
+ assert str(dv) == rep
1044
+ dv = self.G.degree()
1045
+ assert str(dv) == rep
1046
+
1047
+ def test_repr(self):
1048
+ dv = self.dview(self.G)
1049
+ rep = "DegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})"
1050
+ assert repr(dv) == rep
1051
+
1052
+ def test_iter(self):
1053
+ dv = self.dview(self.G)
1054
+ for n, d in dv:
1055
+ pass
1056
+ idv = iter(dv)
1057
+ assert iter(dv) != dv
1058
+ assert iter(idv) == idv
1059
+ assert next(idv) == (0, dv[0])
1060
+ assert next(idv) == (1, dv[1])
1061
+ # weighted
1062
+ dv = self.dview(self.G, weight="foo")
1063
+ for n, d in dv:
1064
+ pass
1065
+ idv = iter(dv)
1066
+ assert iter(dv) != dv
1067
+ assert iter(idv) == idv
1068
+ assert next(idv) == (0, dv[0])
1069
+ assert next(idv) == (1, dv[1])
1070
+
1071
+ def test_nbunch(self):
1072
+ dv = self.dview(self.G)
1073
+ dvn = dv(0)
1074
+ assert dvn == 1
1075
+ dvn = dv([2, 3])
1076
+ assert sorted(dvn) == [(2, 2), (3, 3)]
1077
+
1078
+ def test_getitem(self):
1079
+ dv = self.dview(self.G)
1080
+ assert dv[0] == 1
1081
+ assert dv[1] == 3
1082
+ assert dv[2] == 2
1083
+ assert dv[3] == 3
1084
+ dv = self.dview(self.G, weight="foo")
1085
+ assert dv[0] == 1
1086
+ assert dv[1] == 5
1087
+ assert dv[2] == 2
1088
+ assert dv[3] == 5
1089
+
1090
+ def test_weight(self):
1091
+ dv = self.dview(self.G)
1092
+ dvw = dv(0, weight="foo")
1093
+ assert dvw == 1
1094
+ dvw = dv(1, weight="foo")
1095
+ assert dvw == 5
1096
+ dvw = dv([2, 3], weight="foo")
1097
+ assert sorted(dvw) == [(2, 2), (3, 5)]
1098
+ dvd = dict(dv(weight="foo"))
1099
+ assert dvd[0] == 1
1100
+ assert dvd[1] == 5
1101
+ assert dvd[2] == 2
1102
+ assert dvd[3] == 5
1103
+
1104
+ def test_len(self):
1105
+ dv = self.dview(self.G)
1106
+ assert len(dv) == 6
1107
+
1108
+
1109
+ class TestDiDegreeView(TestDegreeView):
1110
+ GRAPH = nx.DiGraph
1111
+ dview = nx.reportviews.DiDegreeView
1112
+
1113
+ def test_repr(self):
1114
+ dv = self.G.degree()
1115
+ rep = "DiDegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})"
1116
+ assert repr(dv) == rep
1117
+
1118
+
1119
+ class TestOutDegreeView(TestDegreeView):
1120
+ GRAPH = nx.DiGraph
1121
+ dview = nx.reportviews.OutDegreeView
1122
+
1123
+ def test_str(self):
1124
+ dv = self.dview(self.G)
1125
+ rep = str([(0, 1), (1, 2), (2, 1), (3, 1), (4, 1), (5, 0)])
1126
+ assert str(dv) == rep
1127
+ dv = self.G.out_degree()
1128
+ assert str(dv) == rep
1129
+
1130
+ def test_repr(self):
1131
+ dv = self.G.out_degree()
1132
+ rep = "OutDegreeView({0: 1, 1: 2, 2: 1, 3: 1, 4: 1, 5: 0})"
1133
+ assert repr(dv) == rep
1134
+
1135
+ def test_nbunch(self):
1136
+ dv = self.dview(self.G)
1137
+ dvn = dv(0)
1138
+ assert dvn == 1
1139
+ dvn = dv([2, 3])
1140
+ assert sorted(dvn) == [(2, 1), (3, 1)]
1141
+
1142
+ def test_getitem(self):
1143
+ dv = self.dview(self.G)
1144
+ assert dv[0] == 1
1145
+ assert dv[1] == 2
1146
+ assert dv[2] == 1
1147
+ assert dv[3] == 1
1148
+ dv = self.dview(self.G, weight="foo")
1149
+ assert dv[0] == 1
1150
+ assert dv[1] == 4
1151
+ assert dv[2] == 1
1152
+ assert dv[3] == 1
1153
+
1154
+ def test_weight(self):
1155
+ dv = self.dview(self.G)
1156
+ dvw = dv(0, weight="foo")
1157
+ assert dvw == 1
1158
+ dvw = dv(1, weight="foo")
1159
+ assert dvw == 4
1160
+ dvw = dv([2, 3], weight="foo")
1161
+ assert sorted(dvw) == [(2, 1), (3, 1)]
1162
+ dvd = dict(dv(weight="foo"))
1163
+ assert dvd[0] == 1
1164
+ assert dvd[1] == 4
1165
+ assert dvd[2] == 1
1166
+ assert dvd[3] == 1
1167
+
1168
+
1169
+ class TestInDegreeView(TestDegreeView):
1170
+ GRAPH = nx.DiGraph
1171
+ dview = nx.reportviews.InDegreeView
1172
+
1173
+ def test_str(self):
1174
+ dv = self.dview(self.G)
1175
+ rep = str([(0, 0), (1, 1), (2, 1), (3, 2), (4, 1), (5, 1)])
1176
+ assert str(dv) == rep
1177
+ dv = self.G.in_degree()
1178
+ assert str(dv) == rep
1179
+
1180
+ def test_repr(self):
1181
+ dv = self.G.in_degree()
1182
+ rep = "InDegreeView({0: 0, 1: 1, 2: 1, 3: 2, 4: 1, 5: 1})"
1183
+ assert repr(dv) == rep
1184
+
1185
+ def test_nbunch(self):
1186
+ dv = self.dview(self.G)
1187
+ dvn = dv(0)
1188
+ assert dvn == 0
1189
+ dvn = dv([2, 3])
1190
+ assert sorted(dvn) == [(2, 1), (3, 2)]
1191
+
1192
+ def test_getitem(self):
1193
+ dv = self.dview(self.G)
1194
+ assert dv[0] == 0
1195
+ assert dv[1] == 1
1196
+ assert dv[2] == 1
1197
+ assert dv[3] == 2
1198
+ dv = self.dview(self.G, weight="foo")
1199
+ assert dv[0] == 0
1200
+ assert dv[1] == 1
1201
+ assert dv[2] == 1
1202
+ assert dv[3] == 4
1203
+
1204
+ def test_weight(self):
1205
+ dv = self.dview(self.G)
1206
+ dvw = dv(0, weight="foo")
1207
+ assert dvw == 0
1208
+ dvw = dv(1, weight="foo")
1209
+ assert dvw == 1
1210
+ dvw = dv([2, 3], weight="foo")
1211
+ assert sorted(dvw) == [(2, 1), (3, 4)]
1212
+ dvd = dict(dv(weight="foo"))
1213
+ assert dvd[0] == 0
1214
+ assert dvd[1] == 1
1215
+ assert dvd[2] == 1
1216
+ assert dvd[3] == 4
1217
+
1218
+
1219
+ class TestMultiDegreeView(TestDegreeView):
1220
+ GRAPH = nx.MultiGraph
1221
+ dview = nx.reportviews.MultiDegreeView
1222
+
1223
+ def test_str(self):
1224
+ dv = self.dview(self.G)
1225
+ rep = str([(0, 1), (1, 4), (2, 2), (3, 4), (4, 2), (5, 1)])
1226
+ assert str(dv) == rep
1227
+ dv = self.G.degree()
1228
+ assert str(dv) == rep
1229
+
1230
+ def test_repr(self):
1231
+ dv = self.G.degree()
1232
+ rep = "MultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})"
1233
+ assert repr(dv) == rep
1234
+
1235
+ def test_nbunch(self):
1236
+ dv = self.dview(self.G)
1237
+ dvn = dv(0)
1238
+ assert dvn == 1
1239
+ dvn = dv([2, 3])
1240
+ assert sorted(dvn) == [(2, 2), (3, 4)]
1241
+
1242
+ def test_getitem(self):
1243
+ dv = self.dview(self.G)
1244
+ assert dv[0] == 1
1245
+ assert dv[1] == 4
1246
+ assert dv[2] == 2
1247
+ assert dv[3] == 4
1248
+ dv = self.dview(self.G, weight="foo")
1249
+ assert dv[0] == 1
1250
+ assert dv[1] == 7
1251
+ assert dv[2] == 2
1252
+ assert dv[3] == 7
1253
+
1254
+ def test_weight(self):
1255
+ dv = self.dview(self.G)
1256
+ dvw = dv(0, weight="foo")
1257
+ assert dvw == 1
1258
+ dvw = dv(1, weight="foo")
1259
+ assert dvw == 7
1260
+ dvw = dv([2, 3], weight="foo")
1261
+ assert sorted(dvw) == [(2, 2), (3, 7)]
1262
+ dvd = dict(dv(weight="foo"))
1263
+ assert dvd[0] == 1
1264
+ assert dvd[1] == 7
1265
+ assert dvd[2] == 2
1266
+ assert dvd[3] == 7
1267
+
1268
+
1269
+ class TestDiMultiDegreeView(TestMultiDegreeView):
1270
+ GRAPH = nx.MultiDiGraph
1271
+ dview = nx.reportviews.DiMultiDegreeView
1272
+
1273
+ def test_repr(self):
1274
+ dv = self.G.degree()
1275
+ rep = "DiMultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})"
1276
+ assert repr(dv) == rep
1277
+
1278
+
1279
+ class TestOutMultiDegreeView(TestDegreeView):
1280
+ GRAPH = nx.MultiDiGraph
1281
+ dview = nx.reportviews.OutMultiDegreeView
1282
+
1283
+ def test_str(self):
1284
+ dv = self.dview(self.G)
1285
+ rep = str([(0, 1), (1, 3), (2, 1), (3, 1), (4, 1), (5, 0)])
1286
+ assert str(dv) == rep
1287
+ dv = self.G.out_degree()
1288
+ assert str(dv) == rep
1289
+
1290
+ def test_repr(self):
1291
+ dv = self.G.out_degree()
1292
+ rep = "OutMultiDegreeView({0: 1, 1: 3, 2: 1, 3: 1, 4: 1, 5: 0})"
1293
+ assert repr(dv) == rep
1294
+
1295
+ def test_nbunch(self):
1296
+ dv = self.dview(self.G)
1297
+ dvn = dv(0)
1298
+ assert dvn == 1
1299
+ dvn = dv([2, 3])
1300
+ assert sorted(dvn) == [(2, 1), (3, 1)]
1301
+
1302
+ def test_getitem(self):
1303
+ dv = self.dview(self.G)
1304
+ assert dv[0] == 1
1305
+ assert dv[1] == 3
1306
+ assert dv[2] == 1
1307
+ assert dv[3] == 1
1308
+ dv = self.dview(self.G, weight="foo")
1309
+ assert dv[0] == 1
1310
+ assert dv[1] == 6
1311
+ assert dv[2] == 1
1312
+ assert dv[3] == 1
1313
+
1314
+ def test_weight(self):
1315
+ dv = self.dview(self.G)
1316
+ dvw = dv(0, weight="foo")
1317
+ assert dvw == 1
1318
+ dvw = dv(1, weight="foo")
1319
+ assert dvw == 6
1320
+ dvw = dv([2, 3], weight="foo")
1321
+ assert sorted(dvw) == [(2, 1), (3, 1)]
1322
+ dvd = dict(dv(weight="foo"))
1323
+ assert dvd[0] == 1
1324
+ assert dvd[1] == 6
1325
+ assert dvd[2] == 1
1326
+ assert dvd[3] == 1
1327
+
1328
+
1329
+ class TestInMultiDegreeView(TestDegreeView):
1330
+ GRAPH = nx.MultiDiGraph
1331
+ dview = nx.reportviews.InMultiDegreeView
1332
+
1333
+ def test_str(self):
1334
+ dv = self.dview(self.G)
1335
+ rep = str([(0, 0), (1, 1), (2, 1), (3, 3), (4, 1), (5, 1)])
1336
+ assert str(dv) == rep
1337
+ dv = self.G.in_degree()
1338
+ assert str(dv) == rep
1339
+
1340
+ def test_repr(self):
1341
+ dv = self.G.in_degree()
1342
+ rep = "InMultiDegreeView({0: 0, 1: 1, 2: 1, 3: 3, 4: 1, 5: 1})"
1343
+ assert repr(dv) == rep
1344
+
1345
+ def test_nbunch(self):
1346
+ dv = self.dview(self.G)
1347
+ dvn = dv(0)
1348
+ assert dvn == 0
1349
+ dvn = dv([2, 3])
1350
+ assert sorted(dvn) == [(2, 1), (3, 3)]
1351
+
1352
+ def test_getitem(self):
1353
+ dv = self.dview(self.G)
1354
+ assert dv[0] == 0
1355
+ assert dv[1] == 1
1356
+ assert dv[2] == 1
1357
+ assert dv[3] == 3
1358
+ dv = self.dview(self.G, weight="foo")
1359
+ assert dv[0] == 0
1360
+ assert dv[1] == 1
1361
+ assert dv[2] == 1
1362
+ assert dv[3] == 6
1363
+
1364
+ def test_weight(self):
1365
+ dv = self.dview(self.G)
1366
+ dvw = dv(0, weight="foo")
1367
+ assert dvw == 0
1368
+ dvw = dv(1, weight="foo")
1369
+ assert dvw == 1
1370
+ dvw = dv([2, 3], weight="foo")
1371
+ assert sorted(dvw) == [(2, 1), (3, 6)]
1372
+ dvd = dict(dv(weight="foo"))
1373
+ assert dvd[0] == 0
1374
+ assert dvd[1] == 1
1375
+ assert dvd[2] == 1
1376
+ assert dvd[3] == 6
1377
+
1378
+
1379
+ @pytest.mark.parametrize(
1380
+ ("reportview", "err_msg_terms"),
1381
+ (
1382
+ (rv.NodeView, "list(G.nodes"),
1383
+ (rv.NodeDataView, "list(G.nodes.data"),
1384
+ (rv.EdgeView, "list(G.edges"),
1385
+ # Directed EdgeViews
1386
+ (rv.InEdgeView, "list(G.in_edges"),
1387
+ (rv.OutEdgeView, "list(G.edges"),
1388
+ # Multi EdgeViews
1389
+ (rv.MultiEdgeView, "list(G.edges"),
1390
+ (rv.InMultiEdgeView, "list(G.in_edges"),
1391
+ (rv.OutMultiEdgeView, "list(G.edges"),
1392
+ ),
1393
+ )
1394
+ def test_slicing_reportviews(reportview, err_msg_terms):
1395
+ G = nx.complete_graph(3)
1396
+ view = reportview(G)
1397
+ with pytest.raises(nx.NetworkXError) as exc:
1398
+ view[0:2]
1399
+ errmsg = str(exc.value)
1400
+ assert type(view).__name__ in errmsg
1401
+ assert err_msg_terms in errmsg
1402
+
1403
+
1404
+ @pytest.mark.parametrize(
1405
+ "graph", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
1406
+ )
1407
+ def test_cache_dict_get_set_state(graph):
1408
+ G = nx.path_graph(5, graph())
1409
+ G.nodes, G.edges, G.adj, G.degree
1410
+ if G.is_directed():
1411
+ G.pred, G.succ, G.in_edges, G.out_edges, G.in_degree, G.out_degree
1412
+ cached_dict = G.__dict__
1413
+ assert "nodes" in cached_dict
1414
+ assert "edges" in cached_dict
1415
+ assert "adj" in cached_dict
1416
+ assert "degree" in cached_dict
1417
+ if G.is_directed():
1418
+ assert "pred" in cached_dict
1419
+ assert "succ" in cached_dict
1420
+ assert "in_edges" in cached_dict
1421
+ assert "out_edges" in cached_dict
1422
+ assert "in_degree" in cached_dict
1423
+ assert "out_degree" in cached_dict
1424
+
1425
+ # Raises error if the cached properties and views do not work
1426
+ pickle.loads(pickle.dumps(G, -1))
1427
+ deepcopy(G)
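The degree-view tests above all exercise the same contract: indexing a view returns a single number, calling it with an nbunch yields (node, degree) pairs, and the ``weight`` keyword sums an edge attribute instead of counting edges. A minimal sketch of that behaviour (illustrative values only, not part of the test suite):

import networkx as nx

G = nx.MultiDiGraph()
G.add_edge(0, 1)                     # no 'foo' attribute -> counts as 1 when weighted
G.add_edge(1, 2, foo=5)
assert G.degree[0] == 1                                      # item access
assert dict(G.degree([1, 2])) == {1: 2, 2: 1}                # nbunch call
assert dict(G.degree([1, 2], weight="foo")) == {1: 6, 2: 5}  # weighted degree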
venv/lib/python3.10/site-packages/networkx/classes/tests/test_special.py ADDED
@@ -0,0 +1,131 @@
1
+ import networkx as nx
2
+
3
+ from .test_digraph import BaseDiGraphTester
4
+ from .test_digraph import TestDiGraph as _TestDiGraph
5
+ from .test_graph import BaseGraphTester
6
+ from .test_graph import TestGraph as _TestGraph
7
+ from .test_multidigraph import TestMultiDiGraph as _TestMultiDiGraph
8
+ from .test_multigraph import TestMultiGraph as _TestMultiGraph
9
+
10
+
11
+ def test_factories():
12
+ class mydict1(dict):
13
+ pass
14
+
15
+ class mydict2(dict):
16
+ pass
17
+
18
+ class mydict3(dict):
19
+ pass
20
+
21
+ class mydict4(dict):
22
+ pass
23
+
24
+ class mydict5(dict):
25
+ pass
26
+
27
+ for Graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph):
28
+ # print("testing class: ", Graph.__name__)
29
+ class MyGraph(Graph):
30
+ node_dict_factory = mydict1
31
+ adjlist_outer_dict_factory = mydict2
32
+ adjlist_inner_dict_factory = mydict3
33
+ edge_key_dict_factory = mydict4
34
+ edge_attr_dict_factory = mydict5
35
+
36
+ G = MyGraph()
37
+ assert isinstance(G._node, mydict1)
38
+ assert isinstance(G._adj, mydict2)
39
+ G.add_node(1)
40
+ assert isinstance(G._adj[1], mydict3)
41
+ if G.is_directed():
42
+ assert isinstance(G._pred, mydict2)
43
+ assert isinstance(G._succ, mydict2)
44
+ assert isinstance(G._pred[1], mydict3)
45
+ G.add_edge(1, 2)
46
+ if G.is_multigraph():
47
+ assert isinstance(G._adj[1][2], mydict4)
48
+ assert isinstance(G._adj[1][2][0], mydict5)
49
+ else:
50
+ assert isinstance(G._adj[1][2], mydict5)
51
+
52
+
53
+ class TestSpecialGraph(_TestGraph):
54
+ def setup_method(self):
55
+ _TestGraph.setup_method(self)
56
+ self.Graph = nx.Graph
57
+
58
+
59
+ class TestThinGraph(BaseGraphTester):
60
+ def setup_method(self):
61
+ all_edge_dict = {"weight": 1}
62
+
63
+ class MyGraph(nx.Graph):
64
+ def edge_attr_dict_factory(self):
65
+ return all_edge_dict
66
+
67
+ self.Graph = MyGraph
68
+ # build dict-of-dict-of-dict K3
69
+ ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict)
70
+ self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
71
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
72
+ self.k3nodes = [0, 1, 2]
73
+ self.K3 = self.Graph()
74
+ self.K3._adj = self.k3adj
75
+ self.K3._node = {}
76
+ self.K3._node[0] = {}
77
+ self.K3._node[1] = {}
78
+ self.K3._node[2] = {}
79
+
80
+
81
+ class TestSpecialDiGraph(_TestDiGraph):
82
+ def setup_method(self):
83
+ _TestDiGraph.setup_method(self)
84
+ self.Graph = nx.DiGraph
85
+
86
+
87
+ class TestThinDiGraph(BaseDiGraphTester):
88
+ def setup_method(self):
89
+ all_edge_dict = {"weight": 1}
90
+
91
+ class MyGraph(nx.DiGraph):
92
+ def edge_attr_dict_factory(self):
93
+ return all_edge_dict
94
+
95
+ self.Graph = MyGraph
96
+ # build dict-of-dict-of-dict K3
97
+ ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict)
98
+ ed4, ed5, ed6 = (all_edge_dict, all_edge_dict, all_edge_dict)
99
+ self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}}
100
+ self.k3edges = [(0, 1), (0, 2), (1, 2)]
101
+ self.k3nodes = [0, 1, 2]
102
+ self.K3 = self.Graph()
103
+ self.K3._succ = self.k3adj
104
+ # K3._adj is synced with K3._succ
105
+ self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}}
106
+ self.K3._node = {}
107
+ self.K3._node[0] = {}
108
+ self.K3._node[1] = {}
109
+ self.K3._node[2] = {}
110
+
111
+ ed1, ed2 = (all_edge_dict, all_edge_dict)
112
+ self.P3 = self.Graph()
113
+ self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}}
114
+ # P3._adj is synced with P3._succ
115
+ self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}}
116
+ self.P3._node = {}
117
+ self.P3._node[0] = {}
118
+ self.P3._node[1] = {}
119
+ self.P3._node[2] = {}
120
+
121
+
122
+ class TestSpecialMultiGraph(_TestMultiGraph):
123
+ def setup_method(self):
124
+ _TestMultiGraph.setup_method(self)
125
+ self.Graph = nx.MultiGraph
126
+
127
+
128
+ class TestSpecialMultiDiGraph(_TestMultiDiGraph):
129
+ def setup_method(self):
130
+ _TestMultiDiGraph.setup_method(self)
131
+ self.Graph = nx.MultiDiGraph
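TestThinGraph and TestThinDiGraph above rely on overriding ``edge_attr_dict_factory`` so that every edge shares a single attribute dict. A small sketch of the same idea (the class name is hypothetical, not taken from the tests):

import networkx as nx

shared = {"weight": 1}

class ThinGraph(nx.Graph):
    def edge_attr_dict_factory(self):
        return shared                  # every new edge reuses this one dict

G = ThinGraph()
G.add_edge(0, 1)
G.add_edge(1, 2)
assert G[0][1] is G[1][2] is shared    # all edge data dicts are the same object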
venv/lib/python3.10/site-packages/networkx/classes/tests/test_subgraphviews.py ADDED
@@ -0,0 +1,362 @@
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.utils import edges_equal
5
+
6
+
7
+ class TestSubGraphView:
8
+ gview = staticmethod(nx.subgraph_view)
9
+ graph = nx.Graph
10
+ hide_edges_filter = staticmethod(nx.filters.hide_edges)
11
+ show_edges_filter = staticmethod(nx.filters.show_edges)
12
+
13
+ @classmethod
14
+ def setup_class(cls):
15
+ cls.G = nx.path_graph(9, create_using=cls.graph())
16
+ cls.hide_edges_w_hide_nodes = {(3, 4), (4, 5), (5, 6)}
17
+
18
+ def test_hidden_nodes(self):
19
+ hide_nodes = [4, 5, 111]
20
+ nodes_gone = nx.filters.hide_nodes(hide_nodes)
21
+ gview = self.gview
22
+ G = gview(self.G, filter_node=nodes_gone)
23
+ assert self.G.nodes - G.nodes == {4, 5}
24
+ assert self.G.edges - G.edges == self.hide_edges_w_hide_nodes
25
+ if G.is_directed():
26
+ assert list(G[3]) == []
27
+ assert list(G[2]) == [3]
28
+ else:
29
+ assert list(G[3]) == [2]
30
+ assert set(G[2]) == {1, 3}
31
+ pytest.raises(KeyError, G.__getitem__, 4)
32
+ pytest.raises(KeyError, G.__getitem__, 112)
33
+ pytest.raises(KeyError, G.__getitem__, 111)
34
+ assert G.degree(3) == (3 if G.is_multigraph() else 1)
35
+ assert G.size() == (7 if G.is_multigraph() else 5)
36
+
37
+ def test_hidden_edges(self):
38
+ hide_edges = [(2, 3), (8, 7), (222, 223)]
39
+ edges_gone = self.hide_edges_filter(hide_edges)
40
+ gview = self.gview
41
+ G = gview(self.G, filter_edge=edges_gone)
42
+ assert self.G.nodes == G.nodes
43
+ if G.is_directed():
44
+ assert self.G.edges - G.edges == {(2, 3)}
45
+ assert list(G[2]) == []
46
+ assert list(G.pred[3]) == []
47
+ assert list(G.pred[2]) == [1]
48
+ assert G.size() == 7
49
+ else:
50
+ assert self.G.edges - G.edges == {(2, 3), (7, 8)}
51
+ assert list(G[2]) == [1]
52
+ assert G.size() == 6
53
+ assert list(G[3]) == [4]
54
+ pytest.raises(KeyError, G.__getitem__, 221)
55
+ pytest.raises(KeyError, G.__getitem__, 222)
56
+ assert G.degree(3) == 1
57
+
58
+ def test_shown_node(self):
59
+ induced_subgraph = nx.filters.show_nodes([2, 3, 111])
60
+ gview = self.gview
61
+ G = gview(self.G, filter_node=induced_subgraph)
62
+ assert set(G.nodes) == {2, 3}
63
+ if G.is_directed():
64
+ assert list(G[3]) == []
65
+ else:
66
+ assert list(G[3]) == [2]
67
+ assert list(G[2]) == [3]
68
+ pytest.raises(KeyError, G.__getitem__, 4)
69
+ pytest.raises(KeyError, G.__getitem__, 112)
70
+ pytest.raises(KeyError, G.__getitem__, 111)
71
+ assert G.degree(3) == (3 if G.is_multigraph() else 1)
72
+ assert G.size() == (3 if G.is_multigraph() else 1)
73
+
74
+ def test_shown_edges(self):
75
+ show_edges = [(2, 3), (8, 7), (222, 223)]
76
+ edge_subgraph = self.show_edges_filter(show_edges)
77
+ G = self.gview(self.G, filter_edge=edge_subgraph)
78
+ assert self.G.nodes == G.nodes
79
+ if G.is_directed():
80
+ assert G.edges == {(2, 3)}
81
+ assert list(G[3]) == []
82
+ assert list(G[2]) == [3]
83
+ assert list(G.pred[3]) == [2]
84
+ assert list(G.pred[2]) == []
85
+ assert G.size() == 1
86
+ else:
87
+ assert G.edges == {(2, 3), (7, 8)}
88
+ assert list(G[3]) == [2]
89
+ assert list(G[2]) == [3]
90
+ assert G.size() == 2
91
+ pytest.raises(KeyError, G.__getitem__, 221)
92
+ pytest.raises(KeyError, G.__getitem__, 222)
93
+ assert G.degree(3) == 1
94
+
95
+
96
+ class TestSubDiGraphView(TestSubGraphView):
97
+ gview = staticmethod(nx.subgraph_view)
98
+ graph = nx.DiGraph
99
+ hide_edges_filter = staticmethod(nx.filters.hide_diedges)
100
+ show_edges_filter = staticmethod(nx.filters.show_diedges)
101
+ hide_edges = [(2, 3), (8, 7), (222, 223)]
102
+ excluded = {(2, 3), (3, 4), (4, 5), (5, 6)}
103
+
104
+ def test_inoutedges(self):
105
+ edges_gone = self.hide_edges_filter(self.hide_edges)
106
+ hide_nodes = [4, 5, 111]
107
+ nodes_gone = nx.filters.hide_nodes(hide_nodes)
108
+ G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone)
109
+
110
+ assert self.G.in_edges - G.in_edges == self.excluded
111
+ assert self.G.out_edges - G.out_edges == self.excluded
112
+
113
+ def test_pred(self):
114
+ edges_gone = self.hide_edges_filter(self.hide_edges)
115
+ hide_nodes = [4, 5, 111]
116
+ nodes_gone = nx.filters.hide_nodes(hide_nodes)
117
+ G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone)
118
+
119
+ assert list(G.pred[2]) == [1]
120
+ assert list(G.pred[6]) == []
121
+
122
+ def test_inout_degree(self):
123
+ edges_gone = self.hide_edges_filter(self.hide_edges)
124
+ hide_nodes = [4, 5, 111]
125
+ nodes_gone = nx.filters.hide_nodes(hide_nodes)
126
+ G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone)
127
+
128
+ assert G.degree(2) == 1
129
+ assert G.out_degree(2) == 0
130
+ assert G.in_degree(2) == 1
131
+ assert G.size() == 4
132
+
133
+
134
+ # multigraph
135
+ class TestMultiGraphView(TestSubGraphView):
136
+ gview = staticmethod(nx.subgraph_view)
137
+ graph = nx.MultiGraph
138
+ hide_edges_filter = staticmethod(nx.filters.hide_multiedges)
139
+ show_edges_filter = staticmethod(nx.filters.show_multiedges)
140
+
141
+ @classmethod
142
+ def setup_class(cls):
143
+ cls.G = nx.path_graph(9, create_using=cls.graph())
144
+ multiedges = {(2, 3, 4), (2, 3, 5)}
145
+ cls.G.add_edges_from(multiedges)
146
+ cls.hide_edges_w_hide_nodes = {(3, 4, 0), (4, 5, 0), (5, 6, 0)}
147
+
148
+ def test_hidden_edges(self):
149
+ hide_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)]
150
+ edges_gone = self.hide_edges_filter(hide_edges)
151
+ G = self.gview(self.G, filter_edge=edges_gone)
152
+ assert self.G.nodes == G.nodes
153
+ if G.is_directed():
154
+ assert self.G.edges - G.edges == {(2, 3, 4)}
155
+ assert list(G[3]) == [4]
156
+ assert list(G[2]) == [3]
157
+ assert list(G.pred[3]) == [2] # only one 2 but two edges
158
+ assert list(G.pred[2]) == [1]
159
+ assert G.size() == 9
160
+ else:
161
+ assert self.G.edges - G.edges == {(2, 3, 4), (7, 8, 0)}
162
+ assert list(G[3]) == [2, 4]
163
+ assert list(G[2]) == [1, 3]
164
+ assert G.size() == 8
165
+ assert G.degree(3) == 3
166
+ pytest.raises(KeyError, G.__getitem__, 221)
167
+ pytest.raises(KeyError, G.__getitem__, 222)
168
+
169
+ def test_shown_edges(self):
170
+ show_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)]
171
+ edge_subgraph = self.show_edges_filter(show_edges)
172
+ G = self.gview(self.G, filter_edge=edge_subgraph)
173
+ assert self.G.nodes == G.nodes
174
+ if G.is_directed():
175
+ assert G.edges == {(2, 3, 4)}
176
+ assert list(G[3]) == []
177
+ assert list(G.pred[3]) == [2]
178
+ assert list(G.pred[2]) == []
179
+ assert G.size() == 1
180
+ else:
181
+ assert G.edges == {(2, 3, 4), (7, 8, 0)}
182
+ assert G.size() == 2
183
+ assert list(G[3]) == [2]
184
+ assert G.degree(3) == 1
185
+ assert list(G[2]) == [3]
186
+ pytest.raises(KeyError, G.__getitem__, 221)
187
+ pytest.raises(KeyError, G.__getitem__, 222)
188
+
189
+
190
+ # multidigraph
191
+ class TestMultiDiGraphView(TestMultiGraphView, TestSubDiGraphView):
192
+ gview = staticmethod(nx.subgraph_view)
193
+ graph = nx.MultiDiGraph
194
+ hide_edges_filter = staticmethod(nx.filters.hide_multidiedges)
195
+ show_edges_filter = staticmethod(nx.filters.show_multidiedges)
196
+ hide_edges = [(2, 3, 0), (8, 7, 0), (222, 223, 0)]
197
+ excluded = {(2, 3, 0), (3, 4, 0), (4, 5, 0), (5, 6, 0)}
198
+
199
+ def test_inout_degree(self):
200
+ edges_gone = self.hide_edges_filter(self.hide_edges)
201
+ hide_nodes = [4, 5, 111]
202
+ nodes_gone = nx.filters.hide_nodes(hide_nodes)
203
+ G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone)
204
+
205
+ assert G.degree(2) == 3
206
+ assert G.out_degree(2) == 2
207
+ assert G.in_degree(2) == 1
208
+ assert G.size() == 6
209
+
210
+
211
+ # induced_subgraph
212
+ class TestInducedSubGraph:
213
+ @classmethod
214
+ def setup_class(cls):
215
+ cls.K3 = G = nx.complete_graph(3)
216
+ G.graph["foo"] = []
217
+ G.nodes[0]["foo"] = []
218
+ G.remove_edge(1, 2)
219
+ ll = []
220
+ G.add_edge(1, 2, foo=ll)
221
+ G.add_edge(2, 1, foo=ll)
222
+
223
+ def test_full_graph(self):
224
+ G = self.K3
225
+ H = nx.induced_subgraph(G, [0, 1, 2, 5])
226
+ assert H.name == G.name
227
+ self.graphs_equal(H, G)
228
+ self.same_attrdict(H, G)
229
+
230
+ def test_partial_subgraph(self):
231
+ G = self.K3
232
+ H = nx.induced_subgraph(G, 0)
233
+ assert dict(H.adj) == {0: {}}
234
+ assert dict(G.adj) != {0: {}}
235
+
236
+ H = nx.induced_subgraph(G, [0, 1])
237
+ assert dict(H.adj) == {0: {1: {}}, 1: {0: {}}}
238
+
239
+ def same_attrdict(self, H, G):
240
+ old_foo = H[1][2]["foo"]
241
+ H.edges[1, 2]["foo"] = "baz"
242
+ assert G.edges == H.edges
243
+ H.edges[1, 2]["foo"] = old_foo
244
+ assert G.edges == H.edges
245
+ old_foo = H.nodes[0]["foo"]
246
+ H.nodes[0]["foo"] = "baz"
247
+ assert G.nodes == H.nodes
248
+ H.nodes[0]["foo"] = old_foo
249
+ assert G.nodes == H.nodes
250
+
251
+ def graphs_equal(self, H, G):
252
+ assert G._adj == H._adj
253
+ assert G._node == H._node
254
+ assert G.graph == H.graph
255
+ assert G.name == H.name
256
+ if not G.is_directed() and not H.is_directed():
257
+ assert H._adj[1][2] is H._adj[2][1]
258
+ assert G._adj[1][2] is G._adj[2][1]
259
+ else: # at least one is directed
260
+ if not G.is_directed():
261
+ G._pred = G._adj
262
+ G._succ = G._adj
263
+ if not H.is_directed():
264
+ H._pred = H._adj
265
+ H._succ = H._adj
266
+ assert G._pred == H._pred
267
+ assert G._succ == H._succ
268
+ assert H._succ[1][2] is H._pred[2][1]
269
+ assert G._succ[1][2] is G._pred[2][1]
270
+
271
+
272
+ # edge_subgraph
273
+ class TestEdgeSubGraph:
274
+ @classmethod
275
+ def setup_class(cls):
276
+ # Create a path graph on five nodes.
277
+ cls.G = G = nx.path_graph(5)
278
+ # Add some node, edge, and graph attributes.
279
+ for i in range(5):
280
+ G.nodes[i]["name"] = f"node{i}"
281
+ G.edges[0, 1]["name"] = "edge01"
282
+ G.edges[3, 4]["name"] = "edge34"
283
+ G.graph["name"] = "graph"
284
+ # Get the subgraph induced by the first and last edges.
285
+ cls.H = nx.edge_subgraph(G, [(0, 1), (3, 4)])
286
+
287
+ def test_correct_nodes(self):
288
+ """Tests that the subgraph has the correct nodes."""
289
+ assert [(0, "node0"), (1, "node1"), (3, "node3"), (4, "node4")] == sorted(
290
+ self.H.nodes.data("name")
291
+ )
292
+
293
+ def test_correct_edges(self):
294
+ """Tests that the subgraph has the correct edges."""
295
+ assert edges_equal(
296
+ [(0, 1, "edge01"), (3, 4, "edge34")], self.H.edges.data("name")
297
+ )
298
+
299
+ def test_add_node(self):
300
+ """Tests that adding a node to the original graph does not
301
+ affect the nodes of the subgraph.
302
+
303
+ """
304
+ self.G.add_node(5)
305
+ assert [0, 1, 3, 4] == sorted(self.H.nodes)
306
+ self.G.remove_node(5)
307
+
308
+ def test_remove_node(self):
309
+ """Tests that removing a node in the original graph
310
+ removes the nodes of the subgraph.
311
+
312
+ """
313
+ self.G.remove_node(0)
314
+ assert [1, 3, 4] == sorted(self.H.nodes)
315
+ self.G.add_node(0, name="node0")
316
+ self.G.add_edge(0, 1, name="edge01")
317
+
318
+ def test_node_attr_dict(self):
319
+ """Tests that the node attribute dictionary of the two graphs is
320
+ the same object.
321
+
322
+ """
323
+ for v in self.H:
324
+ assert self.G.nodes[v] == self.H.nodes[v]
325
+ # Making a change to G should make a change in H and vice versa.
326
+ self.G.nodes[0]["name"] = "foo"
327
+ assert self.G.nodes[0] == self.H.nodes[0]
328
+ self.H.nodes[1]["name"] = "bar"
329
+ assert self.G.nodes[1] == self.H.nodes[1]
330
+ # Revert the change, so tests pass with pytest-randomly
331
+ self.G.nodes[0]["name"] = "node0"
332
+ self.H.nodes[1]["name"] = "node1"
333
+
334
+ def test_edge_attr_dict(self):
335
+ """Tests that the edge attribute dictionary of the two graphs is
336
+ the same object.
337
+
338
+ """
339
+ for u, v in self.H.edges():
340
+ assert self.G.edges[u, v] == self.H.edges[u, v]
341
+ # Making a change to G should make a change in H and vice versa.
342
+ self.G.edges[0, 1]["name"] = "foo"
343
+ assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"]
344
+ self.H.edges[3, 4]["name"] = "bar"
345
+ assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"]
346
+ # Revert the change, so tests pass with pytest-randomly
347
+ self.G.edges[0, 1]["name"] = "edge01"
348
+ self.H.edges[3, 4]["name"] = "edge34"
349
+
350
+ def test_graph_attr_dict(self):
351
+ """Tests that the graph attribute dictionary of the two graphs
352
+ is the same object.
353
+
354
+ """
355
+ assert self.G.graph is self.H.graph
356
+
357
+ def test_readonly(self):
358
+ """Tests that the subgraph cannot change the graph structure"""
359
+ pytest.raises(nx.NetworkXError, self.H.add_node, 5)
360
+ pytest.raises(nx.NetworkXError, self.H.remove_node, 0)
361
+ pytest.raises(nx.NetworkXError, self.H.add_edge, 5, 6)
362
+ pytest.raises(nx.NetworkXError, self.H.remove_edge, 0, 1)
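The view tests above boil down to one contract: ``nx.subgraph_view`` is a read-only, filtered window onto the base graph, and changes to the base graph show through it. A minimal sketch using the same filter helpers (illustrative values only):

import networkx as nx

G = nx.path_graph(5)                                    # edges (0,1),(1,2),(2,3),(3,4)
view = nx.subgraph_view(
    G,
    filter_node=nx.filters.hide_nodes([4]),
    filter_edge=nx.filters.hide_edges([(1, 2)]),
)
assert sorted(view.nodes) == [0, 1, 2, 3]
assert sorted(view.edges) == [(0, 1), (2, 3)]
G.add_edge(0, 2)                                        # mutations to G are visible in the view
assert (0, 2) in view.edges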
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_asciidoc.cpython-310.pyc ADDED
Binary file (5.62 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_borderless.cpython-310.pyc ADDED
Binary file (1.58 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_html.cpython-310.pyc ADDED
Binary file (5.79 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_json.cpython-310.pyc ADDED
Binary file (6.06 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_jsonlines.cpython-310.pyc ADDED
Binary file (1.87 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_rst.cpython-310.pyc ADDED
Binary file (8.34 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_toml.cpython-310.pyc ADDED
Binary file (3.06 kB). View file
 
venv/lib/python3.10/site-packages/pytablewriter/writer/text/__pycache__/_unicode.cpython-310.pyc ADDED
Binary file (3.4 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__init__.py ADDED
@@ -0,0 +1,211 @@
1
+ """
2
+ Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ # Set default logging handler to avoid "No handler found" warnings.
8
+ import logging
9
+ import sys
10
+ import typing
11
+ import warnings
12
+ from logging import NullHandler
13
+
14
+ from . import exceptions
15
+ from ._base_connection import _TYPE_BODY
16
+ from ._collections import HTTPHeaderDict
17
+ from ._version import __version__
18
+ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
19
+ from .filepost import _TYPE_FIELDS, encode_multipart_formdata
20
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
21
+ from .response import BaseHTTPResponse, HTTPResponse
22
+ from .util.request import make_headers
23
+ from .util.retry import Retry
24
+ from .util.timeout import Timeout
25
+
26
+ # Ensure that Python is compiled with OpenSSL 1.1.1+
27
+ # If the 'ssl' module isn't available at all that's
28
+ # fine, we only care if the module is available.
29
+ try:
30
+ import ssl
31
+ except ImportError:
32
+ pass
33
+ else:
34
+ if not ssl.OPENSSL_VERSION.startswith("OpenSSL "): # Defensive:
35
+ warnings.warn(
36
+ "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
37
+ f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
38
+ "See: https://github.com/urllib3/urllib3/issues/3020",
39
+ exceptions.NotOpenSSLWarning,
40
+ )
41
+ elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1): # Defensive:
42
+ raise ImportError(
43
+ "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
44
+ f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
45
+ "See: https://github.com/urllib3/urllib3/issues/2168"
46
+ )
47
+
48
+ __author__ = "Andrey Petrov ([email protected])"
49
+ __license__ = "MIT"
50
+ __version__ = __version__
51
+
52
+ __all__ = (
53
+ "HTTPConnectionPool",
54
+ "HTTPHeaderDict",
55
+ "HTTPSConnectionPool",
56
+ "PoolManager",
57
+ "ProxyManager",
58
+ "HTTPResponse",
59
+ "Retry",
60
+ "Timeout",
61
+ "add_stderr_logger",
62
+ "connection_from_url",
63
+ "disable_warnings",
64
+ "encode_multipart_formdata",
65
+ "make_headers",
66
+ "proxy_from_url",
67
+ "request",
68
+ "BaseHTTPResponse",
69
+ )
70
+
71
+ logging.getLogger(__name__).addHandler(NullHandler())
72
+
73
+
74
+ def add_stderr_logger(
75
+ level: int = logging.DEBUG,
76
+ ) -> logging.StreamHandler[typing.TextIO]:
77
+ """
78
+ Helper for quickly adding a StreamHandler to the logger. Useful for
79
+ debugging.
80
+
81
+ Returns the handler after adding it.
82
+ """
83
+ # This method needs to be in this __init__.py to get the __name__ correct
84
+ # even if urllib3 is vendored within another package.
85
+ logger = logging.getLogger(__name__)
86
+ handler = logging.StreamHandler()
87
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
88
+ logger.addHandler(handler)
89
+ logger.setLevel(level)
90
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
91
+ return handler
92
+
93
+
94
+ # ... Clean up.
95
+ del NullHandler
96
+
97
+
98
+ # All warning filters *must* be appended unless you're really certain that they
99
+ # shouldn't be: otherwise, it's very hard for users to use most Python
100
+ # mechanisms to silence them.
101
+ # SecurityWarning's always go off by default.
102
+ warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
103
+ # InsecurePlatformWarning's don't vary between requests, so we keep it default.
104
+ warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
105
+
106
+
107
+ def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None:
108
+ """
109
+ Helper for quickly disabling all urllib3 warnings.
110
+ """
111
+ warnings.simplefilter("ignore", category)
112
+
113
+
114
+ _DEFAULT_POOL = PoolManager()
115
+
116
+
117
+ def request(
118
+ method: str,
119
+ url: str,
120
+ *,
121
+ body: _TYPE_BODY | None = None,
122
+ fields: _TYPE_FIELDS | None = None,
123
+ headers: typing.Mapping[str, str] | None = None,
124
+ preload_content: bool | None = True,
125
+ decode_content: bool | None = True,
126
+ redirect: bool | None = True,
127
+ retries: Retry | bool | int | None = None,
128
+ timeout: Timeout | float | int | None = 3,
129
+ json: typing.Any | None = None,
130
+ ) -> BaseHTTPResponse:
131
+ """
132
+ A convenience, top-level request method. It uses a module-global ``PoolManager`` instance.
133
+ Therefore, its side effects could be shared across dependencies relying on it.
134
+ To avoid side effects create a new ``PoolManager`` instance and use it instead.
135
+ The method does not accept low-level ``**urlopen_kw`` keyword arguments.
136
+
137
+ :param method:
138
+ HTTP request method (such as GET, POST, PUT, etc.)
139
+
140
+ :param url:
141
+ The URL to perform the request on.
142
+
143
+ :param body:
144
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
145
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
146
+
147
+ :param fields:
148
+ Data to encode and send in the request body.
149
+
150
+ :param headers:
151
+ Dictionary of custom headers to send, such as User-Agent,
152
+ If-None-Match, etc.
153
+
154
+ :param bool preload_content:
155
+ If True, the response's body will be preloaded into memory.
156
+
157
+ :param bool decode_content:
158
+ If True, will attempt to decode the body based on the
159
+ 'content-encoding' header.
160
+
161
+ :param redirect:
162
+ If True, automatically handle redirects (status codes 301, 302,
163
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
164
+ will disable redirect, too.
165
+
166
+ :param retries:
167
+ Configure the number of retries to allow before raising a
168
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
169
+
170
+ If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
171
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
172
+ over different types of retries.
173
+ Pass an integer number to retry connection errors that many times,
174
+ but no other types of errors. Pass zero to never retry.
175
+
176
+ If ``False``, then retries are disabled and any exception is raised
177
+ immediately. Also, instead of raising a MaxRetryError on redirects,
178
+ the redirect response will be returned.
179
+
180
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
181
+
182
+ :param timeout:
183
+ If specified, overrides the default timeout for this one
184
+ request. It may be a float (in seconds) or an instance of
185
+ :class:`urllib3.util.Timeout`.
186
+
187
+ :param json:
188
+ Data to encode and send as UTF-8 encoded JSON in the request body.
189
+ The ``"Content-Type"`` header will be set to ``"application/json"``
190
+ unless specified otherwise.
191
+ """
192
+
193
+ return _DEFAULT_POOL.request(
194
+ method,
195
+ url,
196
+ body=body,
197
+ fields=fields,
198
+ headers=headers,
199
+ preload_content=preload_content,
200
+ decode_content=decode_content,
201
+ redirect=redirect,
202
+ retries=retries,
203
+ timeout=timeout,
204
+ json=json,
205
+ )
206
+
207
+
208
+ if sys.platform == "emscripten":
209
+ from .contrib.emscripten import inject_into_urllib3 # noqa: 401
210
+
211
+ inject_into_urllib3()
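The module-level ``request()`` defined above is the quickest way to exercise the package; it shares one global ``PoolManager``, so anything beyond a throwaway script should build its own pool. Illustrative usage only (the URL is a placeholder and the call performs real network I/O):

import urllib3

urllib3.add_stderr_logger()                           # optional: verbose logging while debugging
resp = urllib3.request("GET", "https://example.org", timeout=3)
print(resp.status, resp.headers.get("Content-Type"))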
venv/lib/python3.10/site-packages/urllib3/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (6.11 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/_base_connection.cpython-310.pyc ADDED
Binary file (5.7 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/_collections.cpython-310.pyc ADDED
Binary file (16.2 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/_request_methods.cpython-310.pyc ADDED
Binary file (9.14 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/_version.cpython-310.pyc ADDED
Binary file (247 Bytes). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/connection.cpython-310.pyc ADDED
Binary file (22.3 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/connectionpool.cpython-310.pyc ADDED
Binary file (29.3 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (13.2 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/fields.cpython-310.pyc ADDED
Binary file (9.72 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/http2.cpython-310.pyc ADDED
Binary file (7.24 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/poolmanager.cpython-310.pyc ADDED
Binary file (18.2 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/__pycache__/response.cpython-310.pyc ADDED
Binary file (32.9 kB). View file
 
venv/lib/python3.10/site-packages/urllib3/_base_connection.py ADDED
@@ -0,0 +1,172 @@
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ from .util.connection import _TYPE_SOCKET_OPTIONS
6
+ from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
7
+ from .util.url import Url
8
+
9
+ _TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str]
10
+
11
+
12
+ class ProxyConfig(typing.NamedTuple):
13
+ ssl_context: ssl.SSLContext | None
14
+ use_forwarding_for_https: bool
15
+ assert_hostname: None | str | Literal[False]
16
+ assert_fingerprint: str | None
17
+
18
+
19
+ class _ResponseOptions(typing.NamedTuple):
20
+ # TODO: Remove this in favor of a better
21
+ # HTTP request/response lifecycle tracking.
22
+ request_method: str
23
+ request_url: str
24
+ preload_content: bool
25
+ decode_content: bool
26
+ enforce_content_length: bool
27
+
28
+
29
+ if typing.TYPE_CHECKING:
30
+ import ssl
31
+ from typing import Literal, Protocol
32
+
33
+ from .response import BaseHTTPResponse
34
+
35
+ class BaseHTTPConnection(Protocol):
36
+ default_port: typing.ClassVar[int]
37
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
38
+
39
+ host: str
40
+ port: int
41
+ timeout: None | (
42
+ float
43
+ ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved.
44
+ blocksize: int
45
+ source_address: tuple[str, int] | None
46
+ socket_options: _TYPE_SOCKET_OPTIONS | None
47
+
48
+ proxy: Url | None
49
+ proxy_config: ProxyConfig | None
50
+
51
+ is_verified: bool
52
+ proxy_is_verified: bool | None
53
+
54
+ def __init__(
55
+ self,
56
+ host: str,
57
+ port: int | None = None,
58
+ *,
59
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
60
+ source_address: tuple[str, int] | None = None,
61
+ blocksize: int = 8192,
62
+ socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
63
+ proxy: Url | None = None,
64
+ proxy_config: ProxyConfig | None = None,
65
+ ) -> None:
66
+ ...
67
+
68
+ def set_tunnel(
69
+ self,
70
+ host: str,
71
+ port: int | None = None,
72
+ headers: typing.Mapping[str, str] | None = None,
73
+ scheme: str = "http",
74
+ ) -> None:
75
+ ...
76
+
77
+ def connect(self) -> None:
78
+ ...
79
+
80
+ def request(
81
+ self,
82
+ method: str,
83
+ url: str,
84
+ body: _TYPE_BODY | None = None,
85
+ headers: typing.Mapping[str, str] | None = None,
86
+ # We know *at least* botocore is depending on the order of the
87
+ # first 3 parameters so to be safe we only mark the later ones
88
+ # as keyword-only to ensure we have space to extend.
89
+ *,
90
+ chunked: bool = False,
91
+ preload_content: bool = True,
92
+ decode_content: bool = True,
93
+ enforce_content_length: bool = True,
94
+ ) -> None:
95
+ ...
96
+
97
+ def getresponse(self) -> BaseHTTPResponse:
98
+ ...
99
+
100
+ def close(self) -> None:
101
+ ...
102
+
103
+ @property
104
+ def is_closed(self) -> bool:
105
+ """Whether the connection either is brand new or has been previously closed.
106
+ If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
107
+ properties must be False.
108
+ """
109
+
110
+ @property
111
+ def is_connected(self) -> bool:
112
+ """Whether the connection is actively connected to any origin (proxy or target)"""
113
+
114
+ @property
115
+ def has_connected_to_proxy(self) -> bool:
116
+ """Whether the connection has successfully connected to its proxy.
117
+ This returns False if no proxy is in use. Used to determine whether
118
+ errors are coming from the proxy layer or from tunnelling to the target origin.
119
+ """
120
+
121
+ class BaseHTTPSConnection(BaseHTTPConnection, Protocol):
122
+ default_port: typing.ClassVar[int]
123
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
124
+
125
+ # Certificate verification methods
126
+ cert_reqs: int | str | None
127
+ assert_hostname: None | str | Literal[False]
128
+ assert_fingerprint: str | None
129
+ ssl_context: ssl.SSLContext | None
130
+
131
+ # Trusted CAs
132
+ ca_certs: str | None
133
+ ca_cert_dir: str | None
134
+ ca_cert_data: None | str | bytes
135
+
136
+ # TLS version
137
+ ssl_minimum_version: int | None
138
+ ssl_maximum_version: int | None
139
+ ssl_version: int | str | None # Deprecated
140
+
141
+ # Client certificates
142
+ cert_file: str | None
143
+ key_file: str | None
144
+ key_password: str | None
145
+
146
+ def __init__(
147
+ self,
148
+ host: str,
149
+ port: int | None = None,
150
+ *,
151
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
152
+ source_address: tuple[str, int] | None = None,
153
+ blocksize: int = 16384,
154
+ socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
155
+ proxy: Url | None = None,
156
+ proxy_config: ProxyConfig | None = None,
157
+ cert_reqs: int | str | None = None,
158
+ assert_hostname: None | str | Literal[False] = None,
159
+ assert_fingerprint: str | None = None,
160
+ server_hostname: str | None = None,
161
+ ssl_context: ssl.SSLContext | None = None,
162
+ ca_certs: str | None = None,
163
+ ca_cert_dir: str | None = None,
164
+ ca_cert_data: None | str | bytes = None,
165
+ ssl_minimum_version: int | None = None,
166
+ ssl_maximum_version: int | None = None,
167
+ ssl_version: int | str | None = None, # Deprecated
168
+ cert_file: str | None = None,
169
+ key_file: str | None = None,
170
+ key_password: str | None = None,
171
+ ) -> None:
172
+ ...
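``ProxyConfig`` above is an ordinary ``typing.NamedTuple`` and can be constructed directly, while the two ``Protocol`` classes exist only for static type checking and are never instantiated. A tiny sketch, importing from the private module shown above:

from urllib3._base_connection import ProxyConfig

cfg = ProxyConfig(
    ssl_context=None,
    use_forwarding_for_https=False,
    assert_hostname=None,
    assert_fingerprint=None,
)
print(cfg.use_forwarding_for_https)   # False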
venv/lib/python3.10/site-packages/urllib3/_collections.py ADDED
@@ -0,0 +1,483 @@
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+ from collections import OrderedDict
5
+ from enum import Enum, auto
6
+ from threading import RLock
7
+
8
+ if typing.TYPE_CHECKING:
9
+ # We can only import Protocol if TYPE_CHECKING because it's a development
10
+ # dependency, and is not available at runtime.
11
+ from typing import Protocol
12
+
13
+ from typing_extensions import Self
14
+
15
+ class HasGettableStringKeys(Protocol):
16
+ def keys(self) -> typing.Iterator[str]:
17
+ ...
18
+
19
+ def __getitem__(self, key: str) -> str:
20
+ ...
21
+
22
+
23
+ __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
24
+
25
+
26
+ # Key type
27
+ _KT = typing.TypeVar("_KT")
28
+ # Value type
29
+ _VT = typing.TypeVar("_VT")
30
+ # Default type
31
+ _DT = typing.TypeVar("_DT")
32
+
33
+ ValidHTTPHeaderSource = typing.Union[
34
+ "HTTPHeaderDict",
35
+ typing.Mapping[str, str],
36
+ typing.Iterable[typing.Tuple[str, str]],
37
+ "HasGettableStringKeys",
38
+ ]
39
+
40
+
41
+ class _Sentinel(Enum):
42
+ not_passed = auto()
43
+
44
+
45
+ def ensure_can_construct_http_header_dict(
46
+ potential: object,
47
+ ) -> ValidHTTPHeaderSource | None:
48
+ if isinstance(potential, HTTPHeaderDict):
49
+ return potential
50
+ elif isinstance(potential, typing.Mapping):
51
+ # Full runtime checking of the contents of a Mapping is expensive, so for the
52
+ # purposes of typechecking, we assume that any Mapping is the right shape.
53
+ return typing.cast(typing.Mapping[str, str], potential)
54
+ elif isinstance(potential, typing.Iterable):
55
+ # Similarly to Mapping, full runtime checking of the contents of an Iterable is
56
+ # expensive, so for the purposes of typechecking, we assume that any Iterable
57
+ # is the right shape.
58
+ return typing.cast(typing.Iterable[typing.Tuple[str, str]], potential)
59
+ elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"):
60
+ return typing.cast("HasGettableStringKeys", potential)
61
+ else:
62
+ return None
63
+
64
+
65
+ class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]):
66
+ """
67
+ Provides a thread-safe dict-like container which maintains up to
68
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
69
+ ``maxsize``.
70
+
71
+ :param maxsize:
72
+ Maximum number of recent elements to retain.
73
+
74
+ :param dispose_func:
75
+ Every time an item is evicted from the container,
76
+ ``dispose_func(value)`` is called on the evicted value.
77
+ """
78
+
79
+ _container: typing.OrderedDict[_KT, _VT]
80
+ _maxsize: int
81
+ dispose_func: typing.Callable[[_VT], None] | None
82
+ lock: RLock
83
+
84
+ def __init__(
85
+ self,
86
+ maxsize: int = 10,
87
+ dispose_func: typing.Callable[[_VT], None] | None = None,
88
+ ) -> None:
89
+ super().__init__()
90
+ self._maxsize = maxsize
91
+ self.dispose_func = dispose_func
92
+ self._container = OrderedDict()
93
+ self.lock = RLock()
94
+
95
+ def __getitem__(self, key: _KT) -> _VT:
96
+ # Re-insert the item, moving it to the end of the eviction line.
97
+ with self.lock:
98
+ item = self._container.pop(key)
99
+ self._container[key] = item
100
+ return item
101
+
102
+ def __setitem__(self, key: _KT, value: _VT) -> None:
103
+ evicted_item = None
104
+ with self.lock:
105
+ # Possibly evict the existing value of 'key'
106
+ try:
107
+ # If the key exists, we'll overwrite it, which won't change the
108
+ # size of the pool. Because accessing a key should move it to
109
+ # the end of the eviction line, we pop it out first.
110
+ evicted_item = key, self._container.pop(key)
111
+ self._container[key] = value
112
+ except KeyError:
113
+ # When the key does not exist, we insert the value first so that
114
+ # evicting works in all cases, including when self._maxsize is 0
115
+ self._container[key] = value
116
+ if len(self._container) > self._maxsize:
117
+ # If we didn't evict an existing value, and we've hit our maximum
118
+ # size, then we have to evict the least recently used item from
119
+ # the beginning of the container.
120
+ evicted_item = self._container.popitem(last=False)
121
+
122
+ # After releasing the lock on the pool, dispose of any evicted value.
123
+ if evicted_item is not None and self.dispose_func:
124
+ _, evicted_value = evicted_item
125
+ self.dispose_func(evicted_value)
126
+
127
+ def __delitem__(self, key: _KT) -> None:
128
+ with self.lock:
129
+ value = self._container.pop(key)
130
+
131
+ if self.dispose_func:
132
+ self.dispose_func(value)
133
+
134
+ def __len__(self) -> int:
135
+ with self.lock:
136
+ return len(self._container)
137
+
138
+ def __iter__(self) -> typing.NoReturn:
139
+ raise NotImplementedError(
140
+ "Iteration over this class is unlikely to be threadsafe."
141
+ )
142
+
143
+ def clear(self) -> None:
144
+ with self.lock:
145
+ # Copy pointers to all values, then wipe the mapping
146
+ values = list(self._container.values())
147
+ self._container.clear()
148
+
149
+ if self.dispose_func:
150
+ for value in values:
151
+ self.dispose_func(value)
152
+
153
+ def keys(self) -> set[_KT]: # type: ignore[override]
154
+ with self.lock:
155
+ return set(self._container.keys())
156
+
157
+
158
+ class HTTPHeaderDictItemView(typing.Set[typing.Tuple[str, str]]):
159
+ """
160
+ HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of
161
+ address.
162
+
163
+ If we directly try to get an item with a particular name, we will get a string
164
+ back that is the concatenated version of all the values:
165
+
166
+ >>> d['X-Header-Name']
167
+ 'Value1, Value2, Value3'
168
+
169
+ However, if we iterate over an HTTPHeaderDict's items, we will optionally combine
170
+ these values based on whether combine=True was called when building up the dictionary
171
+
172
+ >>> d = HTTPHeaderDict({"A": "1", "B": "foo"})
173
+ >>> d.add("A", "2", combine=True)
174
+ >>> d.add("B", "bar")
175
+ >>> list(d.items())
176
+ [
177
+ ('A', '1, 2'),
178
+ ('B', 'foo'),
179
+ ('B', 'bar'),
180
+ ]
181
+
182
+ This class conforms to the interface required by the MutableMapping ABC while
183
+ also giving us the nonstandard iteration behavior we want; items with duplicate
184
+ keys, ordered by time of first insertion.
185
+ """
186
+
187
+ _headers: HTTPHeaderDict
188
+
189
+ def __init__(self, headers: HTTPHeaderDict) -> None:
190
+ self._headers = headers
191
+
192
+ def __len__(self) -> int:
193
+ return len(list(self._headers.iteritems()))
194
+
195
+ def __iter__(self) -> typing.Iterator[tuple[str, str]]:
196
+ return self._headers.iteritems()
197
+
198
+ def __contains__(self, item: object) -> bool:
199
+ if isinstance(item, tuple) and len(item) == 2:
200
+ passed_key, passed_val = item
201
+ if isinstance(passed_key, str) and isinstance(passed_val, str):
202
+ return self._headers._has_value_for_header(passed_key, passed_val)
203
+ return False
204
+
205
+
206
+ class HTTPHeaderDict(typing.MutableMapping[str, str]):
207
+ """
208
+ :param headers:
209
+ An iterable of field-value pairs. Must not contain multiple field names
210
+ when compared case-insensitively.
211
+
212
+ :param kwargs:
213
+ Additional field-value pairs to pass in to ``dict.update``.
214
+
215
+ A ``dict`` like container for storing HTTP Headers.
216
+
217
+ Field names are stored and compared case-insensitively in compliance with
218
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
219
+ case-insensitive pair.
220
+
221
+ Using ``__setitem__`` syntax overwrites fields that compare equal
222
+ case-insensitively in order to maintain ``dict``'s api. For fields that
223
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
224
+ in a loop.
225
+
226
+ If multiple fields that are equal case-insensitively are passed to the
227
+ constructor or ``.update``, the behavior is undefined and some will be
228
+ lost.
229
+
230
+ >>> headers = HTTPHeaderDict()
231
+ >>> headers.add('Set-Cookie', 'foo=bar')
232
+ >>> headers.add('set-cookie', 'baz=quxx')
233
+ >>> headers['content-length'] = '7'
234
+ >>> headers['SET-cookie']
235
+ 'foo=bar, baz=quxx'
236
+ >>> headers['Content-Length']
237
+ '7'
238
+ """
239
+
240
+ _container: typing.MutableMapping[str, list[str]]
241
+
242
+ def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str):
243
+ super().__init__()
244
+ self._container = {} # 'dict' is insert-ordered
245
+ if headers is not None:
246
+ if isinstance(headers, HTTPHeaderDict):
247
+ self._copy_from(headers)
248
+ else:
249
+ self.extend(headers)
250
+ if kwargs:
251
+ self.extend(kwargs)
252
+
253
+ def __setitem__(self, key: str, val: str) -> None:
254
+ # avoid a bytes/str comparison by decoding before httplib
255
+ if isinstance(key, bytes):
256
+ key = key.decode("latin-1")
257
+ self._container[key.lower()] = [key, val]
258
+
259
+ def __getitem__(self, key: str) -> str:
260
+ val = self._container[key.lower()]
261
+ return ", ".join(val[1:])
262
+
263
+ def __delitem__(self, key: str) -> None:
264
+ del self._container[key.lower()]
265
+
266
+ def __contains__(self, key: object) -> bool:
267
+ if isinstance(key, str):
268
+ return key.lower() in self._container
269
+ return False
270
+
271
+ def setdefault(self, key: str, default: str = "") -> str:
272
+ return super().setdefault(key, default)
273
+
274
+ def __eq__(self, other: object) -> bool:
275
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
276
+ if maybe_constructable is None:
277
+ return False
278
+ else:
279
+ other_as_http_header_dict = type(self)(maybe_constructable)
280
+
281
+ return {k.lower(): v for k, v in self.itermerged()} == {
282
+ k.lower(): v for k, v in other_as_http_header_dict.itermerged()
283
+ }
284
+
285
+ def __ne__(self, other: object) -> bool:
286
+ return not self.__eq__(other)
287
+
288
+ def __len__(self) -> int:
289
+ return len(self._container)
290
+
291
+ def __iter__(self) -> typing.Iterator[str]:
292
+ # Only provide the originally cased names
293
+ for vals in self._container.values():
294
+ yield vals[0]
295
+
296
+ def discard(self, key: str) -> None:
297
+ try:
298
+ del self[key]
299
+ except KeyError:
300
+ pass
301
+
302
+ def add(self, key: str, val: str, *, combine: bool = False) -> None:
303
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
304
+ exists.
305
+
306
+ If this is called with combine=True, instead of adding a new header value
307
+ as a distinct item during iteration, this will instead append the value to
308
+ any existing header value with a comma. If no existing header value exists
309
+ for the key, then the value will simply be added, ignoring the combine parameter.
310
+
311
+ >>> headers = HTTPHeaderDict(foo='bar')
312
+ >>> headers.add('Foo', 'baz')
313
+ >>> headers['foo']
314
+ 'bar, baz'
315
+ >>> list(headers.items())
316
+ [('foo', 'bar'), ('foo', 'baz')]
317
+ >>> headers.add('foo', 'quz', combine=True)
318
+ >>> list(headers.items())
319
+ [('foo', 'bar, baz, quz')]
320
+ """
321
+ # avoid a bytes/str comparison by decoding before httplib
322
+ if isinstance(key, bytes):
323
+ key = key.decode("latin-1")
324
+ key_lower = key.lower()
325
+ new_vals = [key, val]
326
+ # Keep the common case aka no item present as fast as possible
327
+ vals = self._container.setdefault(key_lower, new_vals)
328
+ if new_vals is not vals:
329
+ # if there are values here, then there is at least the initial
330
+ # key/value pair
331
+ assert len(vals) >= 2
332
+ if combine:
333
+ vals[-1] = vals[-1] + ", " + val
334
+ else:
335
+ vals.append(val)
336
+
337
+ def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None:
338
+ """Generic import function for any type of header-like object.
339
+ Adapted version of MutableMapping.update in order to insert items
340
+ with self.add instead of self.__setitem__
341
+ """
342
+ if len(args) > 1:
343
+ raise TypeError(
344
+ f"extend() takes at most 1 positional arguments ({len(args)} given)"
345
+ )
346
+ other = args[0] if len(args) >= 1 else ()
347
+
348
+ if isinstance(other, HTTPHeaderDict):
349
+ for key, val in other.iteritems():
350
+ self.add(key, val)
351
+ elif isinstance(other, typing.Mapping):
352
+ for key, val in other.items():
353
+ self.add(key, val)
354
+ elif isinstance(other, typing.Iterable):
355
+ other = typing.cast(typing.Iterable[typing.Tuple[str, str]], other)
356
+ for key, value in other:
357
+ self.add(key, value)
358
+ elif hasattr(other, "keys") and hasattr(other, "__getitem__"):
359
+ # THIS IS NOT A TYPESAFE BRANCH
360
+ # In this branch, the object has a `keys` attr but is not a Mapping or any of
361
+ # the other types indicated in the method signature. We do some stuff with
362
+ # it as though it partially implements the Mapping interface, but we're not
363
+ # doing that stuff safely AT ALL.
364
+ for key in other.keys():
365
+ self.add(key, other[key])
366
+
367
+ for key, value in kwargs.items():
368
+ self.add(key, value)
369
+
370
+ @typing.overload
371
+ def getlist(self, key: str) -> list[str]:
372
+ ...
373
+
374
+ @typing.overload
375
+ def getlist(self, key: str, default: _DT) -> list[str] | _DT:
376
+ ...
377
+
378
+ def getlist(
379
+ self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed
380
+ ) -> list[str] | _DT:
381
+ """Returns a list of all the values for the named field. Returns an
382
+ empty list if the key doesn't exist."""
383
+ try:
384
+ vals = self._container[key.lower()]
385
+ except KeyError:
386
+ if default is _Sentinel.not_passed:
387
+ # _DT is unbound; empty list is instance of List[str]
388
+ return []
389
+ # _DT is bound; default is instance of _DT
390
+ return default
391
+ else:
392
+ # _DT may or may not be bound; vals[1:] is instance of List[str], which
393
+ # meets our external interface requirement of `Union[List[str], _DT]`.
394
+ return vals[1:]
395
+
396
+ def _prepare_for_method_change(self) -> Self:
397
+ """
398
+ Remove content-specific header fields before changing the request
399
+ method to GET or HEAD according to RFC 9110, Section 15.4.
400
+ """
401
+ content_specific_headers = [
402
+ "Content-Encoding",
403
+ "Content-Language",
404
+ "Content-Location",
405
+ "Content-Type",
406
+ "Content-Length",
407
+ "Digest",
408
+ "Last-Modified",
409
+ ]
410
+ for header in content_specific_headers:
411
+ self.discard(header)
412
+ return self
413
+
414
+ # Backwards compatibility for httplib
415
+ getheaders = getlist
416
+ getallmatchingheaders = getlist
417
+ iget = getlist
418
+
419
+ # Backwards compatibility for http.cookiejar
420
+ get_all = getlist
421
+
422
+ def __repr__(self) -> str:
423
+ return f"{type(self).__name__}({dict(self.itermerged())})"
424
+
425
+ def _copy_from(self, other: HTTPHeaderDict) -> None:
426
+ for key in other:
427
+ val = other.getlist(key)
428
+ self._container[key.lower()] = [key, *val]
429
+
430
+ def copy(self) -> HTTPHeaderDict:
431
+ clone = type(self)()
432
+ clone._copy_from(self)
433
+ return clone
434
+
435
+ def iteritems(self) -> typing.Iterator[tuple[str, str]]:
436
+ """Iterate over all header lines, including duplicate ones."""
437
+ for key in self:
438
+ vals = self._container[key.lower()]
439
+ for val in vals[1:]:
440
+ yield vals[0], val
441
+
442
+ def itermerged(self) -> typing.Iterator[tuple[str, str]]:
443
+ """Iterate over all headers, merging duplicate ones together."""
444
+ for key in self:
445
+ val = self._container[key.lower()]
446
+ yield val[0], ", ".join(val[1:])
447
+
448
+ def items(self) -> HTTPHeaderDictItemView: # type: ignore[override]
449
+ return HTTPHeaderDictItemView(self)
450
+
451
+ def _has_value_for_header(self, header_name: str, potential_value: str) -> bool:
452
+ if header_name in self:
453
+ return potential_value in self._container[header_name.lower()][1:]
454
+ return False
455
+
456
+ def __ior__(self, other: object) -> HTTPHeaderDict:
457
+ # Supports extending a header dict in-place using operator |=
458
+ # combining items with add instead of __setitem__
459
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
460
+ if maybe_constructable is None:
461
+ return NotImplemented
462
+ self.extend(maybe_constructable)
463
+ return self
464
+
465
+ def __or__(self, other: object) -> HTTPHeaderDict:
466
+ # Supports merging header dicts using operator |
467
+ # combining items with add instead of __setitem__
468
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
469
+ if maybe_constructable is None:
470
+ return NotImplemented
471
+ result = self.copy()
472
+ result.extend(maybe_constructable)
473
+ return result
474
+
475
+ def __ror__(self, other: object) -> HTTPHeaderDict:
476
+ # Supports merging header dicts using operator | when other is on left side
477
+ # combining items with add instead of __setitem__
478
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
479
+ if maybe_constructable is None:
480
+ return NotImplemented
481
+ result = type(self)(maybe_constructable)
482
+ result.extend(self)
483
+ return result
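A short sketch of the two containers defined in this file, imported from the private module shown above (values are illustrative):

from urllib3._collections import HTTPHeaderDict, RecentlyUsedContainer

h = HTTPHeaderDict()
h.add("Set-Cookie", "a=1")
h.add("set-cookie", "b=2")
assert h["SET-COOKIE"] == "a=1, b=2"             # case-insensitive keys, values comma-joined
assert h.getlist("set-cookie") == ["a=1", "b=2"]

evicted = []
lru = RecentlyUsedContainer(maxsize=2, dispose_func=evicted.append)
lru["a"], lru["b"] = 1, 2
lru["c"] = 3                                      # evicts the least recently used key, "a"
assert evicted == [1]
assert lru.keys() == {"b", "c"}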
venv/lib/python3.10/site-packages/urllib3/_request_methods.py ADDED
@@ -0,0 +1,279 @@
1
+ from __future__ import annotations
2
+
3
+ import json as _json
4
+ import typing
5
+ from urllib.parse import urlencode
6
+
7
+ from ._base_connection import _TYPE_BODY
8
+ from ._collections import HTTPHeaderDict
9
+ from .filepost import _TYPE_FIELDS, encode_multipart_formdata
10
+ from .response import BaseHTTPResponse
11
+
12
+ __all__ = ["RequestMethods"]
13
+
14
+ _TYPE_ENCODE_URL_FIELDS = typing.Union[
15
+ typing.Sequence[typing.Tuple[str, typing.Union[str, bytes]]],
16
+ typing.Mapping[str, typing.Union[str, bytes]],
17
+ ]
18
+
19
+
20
+ class RequestMethods:
21
+ """
22
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
23
+ as :class:`urllib3.HTTPConnectionPool` and
24
+ :class:`urllib3.PoolManager`.
25
+
26
+ Provides behavior for making common types of HTTP request methods and
27
+ decides which type of request field encoding to use.
28
+
29
+ Specifically,
30
+
31
+ :meth:`.request_encode_url` is for sending requests whose fields are
32
+ encoded in the URL (such as GET, HEAD, DELETE).
33
+
34
+ :meth:`.request_encode_body` is for sending requests whose fields are
35
+ encoded in the *body* of the request using multipart or www-form-urlencoded
36
+ (such as for POST, PUT, PATCH).
37
+
38
+ :meth:`.request` is for making any kind of request, it will look up the
39
+ appropriate encoding format and use one of the above two methods to make
40
+ the request.
41
+
42
+ Initializer parameters:
43
+
44
+ :param headers:
45
+ Headers to include with all requests, unless other headers are given
46
+ explicitly.
47
+ """
48
+
49
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
50
+
51
+ def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None:
52
+ self.headers = headers or {}
53
+
54
+ def urlopen(
55
+ self,
56
+ method: str,
57
+ url: str,
58
+ body: _TYPE_BODY | None = None,
59
+ headers: typing.Mapping[str, str] | None = None,
60
+ encode_multipart: bool = True,
61
+ multipart_boundary: str | None = None,
62
+ **kw: typing.Any,
63
+ ) -> BaseHTTPResponse: # Abstract
64
+ raise NotImplementedError(
65
+ "Classes extending RequestMethods must implement "
66
+ "their own ``urlopen`` method."
67
+ )
68
+
69
+ def request(
70
+ self,
71
+ method: str,
72
+ url: str,
73
+ body: _TYPE_BODY | None = None,
74
+ fields: _TYPE_FIELDS | None = None,
75
+ headers: typing.Mapping[str, str] | None = None,
76
+ json: typing.Any | None = None,
77
+ **urlopen_kw: typing.Any,
78
+ ) -> BaseHTTPResponse:
79
+ """
80
+ Make a request using :meth:`urlopen` with the appropriate encoding of
81
+ ``fields`` based on the ``method`` used.
82
+
83
+ This is a convenience method that requires the least amount of manual
84
+ effort. It can be used in most situations, while still having the
85
+ option to drop down to more specific methods when necessary, such as
86
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
87
+ or even the lowest level :meth:`urlopen`.
88
+
89
+ :param method:
90
+ HTTP request method (such as GET, POST, PUT, etc.)
91
+
92
+ :param url:
93
+ The URL to perform the request on.
94
+
95
+ :param body:
96
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
97
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
98
+
99
+ :param fields:
100
+ Data to encode and send in the request body. Values are processed
101
+ by :func:`urllib.parse.urlencode`.
102
+
103
+ :param headers:
104
+ Dictionary of custom headers to send, such as User-Agent,
105
+ If-None-Match, etc. If None, pool headers are used. If provided,
106
+ these headers completely replace any pool-specific headers.
107
+
108
+ :param json:
109
+ Data to encode and send as JSON, UTF-8 encoded, in the request body.
110
+ The ``"Content-Type"`` header will be set to ``"application/json"``
111
+ unless specified otherwise.
112
+ """
113
+ method = method.upper()
114
+
115
+ if json is not None and body is not None:
116
+ raise TypeError(
117
+ "request got values for both 'body' and 'json' parameters which are mutually exclusive"
118
+ )
119
+
120
+ if json is not None:
121
+ if headers is None:
122
+ headers = self.headers
123
+
124
+ if not ("content-type" in map(str.lower, headers.keys())):
125
+ headers = HTTPHeaderDict(headers)
126
+ headers["Content-Type"] = "application/json"
127
+
128
+ body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode(
129
+ "utf-8"
130
+ )
131
+
132
+ if body is not None:
133
+ urlopen_kw["body"] = body
134
+
135
+ if method in self._encode_url_methods:
136
+ return self.request_encode_url(
137
+ method,
138
+ url,
139
+ fields=fields, # type: ignore[arg-type]
140
+ headers=headers,
141
+ **urlopen_kw,
142
+ )
143
+ else:
144
+ return self.request_encode_body(
145
+ method, url, fields=fields, headers=headers, **urlopen_kw
146
+ )
147
+
148
+ def request_encode_url(
149
+ self,
150
+ method: str,
151
+ url: str,
152
+ fields: _TYPE_ENCODE_URL_FIELDS | None = None,
153
+ headers: typing.Mapping[str, str] | None = None,
154
+ **urlopen_kw: str,
155
+ ) -> BaseHTTPResponse:
156
+ """
157
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
158
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
159
+
160
+ :param method:
161
+ HTTP request method (such as GET, POST, PUT, etc.)
162
+
163
+ :param url:
164
+ The URL to perform the request on.
165
+
166
+ :param fields:
167
+ Data to encode and send in the request body.
168
+
169
+ :param headers:
170
+ Dictionary of custom headers to send, such as User-Agent,
171
+ If-None-Match, etc. If None, pool headers are used. If provided,
172
+ these headers completely replace any pool-specific headers.
173
+ """
174
+ if headers is None:
175
+ headers = self.headers
176
+
177
+ extra_kw: dict[str, typing.Any] = {"headers": headers}
178
+ extra_kw.update(urlopen_kw)
179
+
180
+ if fields:
181
+ url += "?" + urlencode(fields)
182
+
183
+ return self.urlopen(method, url, **extra_kw)
184
+
185
+ def request_encode_body(
186
+ self,
187
+ method: str,
188
+ url: str,
189
+ fields: _TYPE_FIELDS | None = None,
190
+ headers: typing.Mapping[str, str] | None = None,
191
+ encode_multipart: bool = True,
192
+ multipart_boundary: str | None = None,
193
+ **urlopen_kw: str,
194
+ ) -> BaseHTTPResponse:
195
+ """
196
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
197
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
198
+
199
+ When ``encode_multipart=True`` (default), then
200
+ :func:`urllib3.encode_multipart_formdata` is used to encode
201
+ the payload with the appropriate content type. Otherwise
202
+ :func:`urllib.parse.urlencode` is used with the
203
+ 'application/x-www-form-urlencoded' content type.
204
+
205
+ Multipart encoding must be used when posting files, and it's reasonably
206
+ safe to use it at other times too. However, it may break request
207
+ signing, such as with OAuth.
208
+
209
+ Supports an optional ``fields`` parameter of key/value strings AND
210
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
211
+ the MIME type is optional. For example::
212
+
213
+ fields = {
214
+ 'foo': 'bar',
215
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
216
+ 'realfile': ('barfile.txt', open('realfile').read()),
217
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
218
+ 'image/jpeg'),
219
+ 'nonamefile': 'contents of nonamefile field',
220
+ }
221
+
222
+ When uploading a file, providing a filename (the first parameter of the
223
+ tuple) is optional but recommended to best mimic behavior of browsers.
224
+
225
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
226
+ be overwritten because it depends on the dynamic random boundary string
227
+ which is used to compose the body of the request. The random boundary
228
+ string can be explicitly set with the ``multipart_boundary`` parameter.
229
+
230
+ :param method:
231
+ HTTP request method (such as GET, POST, PUT, etc.)
232
+
233
+ :param url:
234
+ The URL to perform the request on.
235
+
236
+ :param fields:
237
+ Data to encode and send in the request body.
238
+
239
+ :param headers:
240
+ Dictionary of custom headers to send, such as User-Agent,
241
+ If-None-Match, etc. If None, pool headers are used. If provided,
242
+ these headers completely replace any pool-specific headers.
243
+
244
+ :param encode_multipart:
245
+ If True, encode the ``fields`` using the multipart/form-data MIME
246
+ format.
247
+
248
+ :param multipart_boundary:
249
+ If not specified, then a random boundary will be generated using
250
+ :func:`urllib3.filepost.choose_boundary`.
251
+ """
252
+ if headers is None:
253
+ headers = self.headers
254
+
255
+ extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)}
256
+ body: bytes | str
257
+
258
+ if fields:
259
+ if "body" in urlopen_kw:
260
+ raise TypeError(
261
+ "request got values for both 'fields' and 'body', can only specify one."
262
+ )
263
+
264
+ if encode_multipart:
265
+ body, content_type = encode_multipart_formdata(
266
+ fields, boundary=multipart_boundary
267
+ )
268
+ else:
269
+ body, content_type = (
270
+ urlencode(fields), # type: ignore[arg-type]
271
+ "application/x-www-form-urlencoded",
272
+ )
273
+
274
+ extra_kw["body"] = body
275
+ extra_kw["headers"].setdefault("Content-Type", content_type)
276
+
277
+ extra_kw.update(urlopen_kw)
278
+
279
+ return self.urlopen(method, url, **extra_kw)
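To make the methods above concrete, here is a brief usage sketch (not part of the diff) of `RequestMethods.request()` as exposed through `urllib3.PoolManager`, which subclasses it; the `https://httpbin.org/...` URLs are only example endpoints:

```python
import urllib3

http = urllib3.PoolManager()

# GET/HEAD/DELETE/OPTIONS: fields go through request_encode_url() into the query string.
r = http.request("GET", "https://httpbin.org/get", fields={"q": "urllib3"})

# Other methods: fields go through request_encode_body(); multipart/form-data by default.
r = http.request("POST", "https://httpbin.org/post", fields={"name": "value"})

# A filetuple uploads a file: (filename, data[, mime_type]).
r = http.request(
    "POST",
    "https://httpbin.org/post",
    fields={"attachment": ("notes.txt", "contents of notes.txt", "text/plain")},
)

# json= serializes the payload and sets Content-Type: application/json.
r = http.request("POST", "https://httpbin.org/post", json={"name": "value"})
print(r.status, r.json())
```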
venv/lib/python3.10/site-packages/urllib3/_version.py ADDED
@@ -0,0 +1,4 @@
1
+ # This file is protected via CODEOWNERS
2
+ from __future__ import annotations
3
+
4
+ __version__ = "2.2.1"
venv/lib/python3.10/site-packages/urllib3/connection.py ADDED
@@ -0,0 +1,930 @@
1
+ from __future__ import annotations
2
+
3
+ import datetime
4
+ import logging
5
+ import os
6
+ import re
7
+ import socket
8
+ import sys
9
+ import typing
10
+ import warnings
11
+ from http.client import HTTPConnection as _HTTPConnection
12
+ from http.client import HTTPException as HTTPException # noqa: F401
13
+ from http.client import ResponseNotReady
14
+ from socket import timeout as SocketTimeout
15
+
16
+ if typing.TYPE_CHECKING:
17
+ from typing import Literal
18
+
19
+ from .response import HTTPResponse
20
+ from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
21
+ from .util.ssltransport import SSLTransport
22
+
23
+ from ._collections import HTTPHeaderDict
24
+ from .util.response import assert_header_parsing
25
+ from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
26
+ from .util.util import to_str
27
+ from .util.wait import wait_for_read
28
+
29
+ try: # Compiled with SSL?
30
+ import ssl
31
+
32
+ BaseSSLError = ssl.SSLError
33
+ except (ImportError, AttributeError):
34
+ ssl = None # type: ignore[assignment]
35
+
36
+ class BaseSSLError(BaseException): # type: ignore[no-redef]
37
+ pass
38
+
39
+
40
+ from ._base_connection import _TYPE_BODY
41
+ from ._base_connection import ProxyConfig as ProxyConfig
42
+ from ._base_connection import _ResponseOptions as _ResponseOptions
43
+ from ._version import __version__
44
+ from .exceptions import (
45
+ ConnectTimeoutError,
46
+ HeaderParsingError,
47
+ NameResolutionError,
48
+ NewConnectionError,
49
+ ProxyError,
50
+ SystemTimeWarning,
51
+ )
52
+ from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
53
+ from .util.request import body_to_chunks
54
+ from .util.ssl_ import assert_fingerprint as _assert_fingerprint
55
+ from .util.ssl_ import (
56
+ create_urllib3_context,
57
+ is_ipaddress,
58
+ resolve_cert_reqs,
59
+ resolve_ssl_version,
60
+ ssl_wrap_socket,
61
+ )
62
+ from .util.ssl_match_hostname import CertificateError, match_hostname
63
+ from .util.url import Url
64
+
65
+ # Not a no-op, we're adding this to the namespace so it can be imported.
66
+ ConnectionError = ConnectionError
67
+ BrokenPipeError = BrokenPipeError
68
+
69
+
70
+ log = logging.getLogger(__name__)
71
+
72
+ port_by_scheme = {"http": 80, "https": 443}
73
+
74
+ # When it comes time to update this value as a part of regular maintenance
75
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
76
+ RECENT_DATE = datetime.date(2023, 6, 1)
77
+
78
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
79
+
80
+ _HAS_SYS_AUDIT = hasattr(sys, "audit")
81
+
82
+
83
+ class HTTPConnection(_HTTPConnection):
84
+ """
85
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
86
+ backwards-compatibility layer between older and newer Pythons.
87
+
88
+ Additional keyword parameters are used to configure attributes of the connection.
89
+ Accepted parameters include:
90
+
91
+ - ``source_address``: Set the source address for the current connection.
92
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
93
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
94
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
95
+
96
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
97
+ you might pass:
98
+
99
+ .. code-block:: python
100
+
101
+ HTTPConnection.default_socket_options + [
102
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
103
+ ]
104
+
105
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
106
+ """
107
+
108
+ default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc]
109
+
110
+ #: Disable Nagle's algorithm by default.
111
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
112
+ default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [
113
+ (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
114
+ ]
115
+
116
+ #: Whether this connection verifies the host's certificate.
117
+ is_verified: bool = False
118
+
119
+ #: Whether this proxy connection verified the proxy host's certificate.
120
+ # If no proxy is currently connected to the value will be ``None``.
121
+ proxy_is_verified: bool | None = None
122
+
123
+ blocksize: int
124
+ source_address: tuple[str, int] | None
125
+ socket_options: connection._TYPE_SOCKET_OPTIONS | None
126
+
127
+ _has_connected_to_proxy: bool
128
+ _response_options: _ResponseOptions | None
129
+ _tunnel_host: str | None
130
+ _tunnel_port: int | None
131
+ _tunnel_scheme: str | None
132
+
133
+ def __init__(
134
+ self,
135
+ host: str,
136
+ port: int | None = None,
137
+ *,
138
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
139
+ source_address: tuple[str, int] | None = None,
140
+ blocksize: int = 16384,
141
+ socket_options: None
142
+ | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options,
143
+ proxy: Url | None = None,
144
+ proxy_config: ProxyConfig | None = None,
145
+ ) -> None:
146
+ super().__init__(
147
+ host=host,
148
+ port=port,
149
+ timeout=Timeout.resolve_default_timeout(timeout),
150
+ source_address=source_address,
151
+ blocksize=blocksize,
152
+ )
153
+ self.socket_options = socket_options
154
+ self.proxy = proxy
155
+ self.proxy_config = proxy_config
156
+
157
+ self._has_connected_to_proxy = False
158
+ self._response_options = None
159
+ self._tunnel_host: str | None = None
160
+ self._tunnel_port: int | None = None
161
+ self._tunnel_scheme: str | None = None
162
+
163
+ @property
164
+ def host(self) -> str:
165
+ """
166
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
167
+
168
+ In general, SSL certificates don't include the trailing dot indicating a
169
+ fully-qualified domain name, and thus, they don't validate properly when
170
+ checked against a domain name that includes the dot. In addition, some
171
+ servers may not expect to receive the trailing dot when provided.
172
+
173
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
174
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
175
+ whereas a lookup without a trailing dot will search the system's search domain
176
+ list. Thus, it's important to keep the original host around for use only in
177
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
178
+ actual TCP connection across which we're going to send HTTP requests).
179
+ """
180
+ return self._dns_host.rstrip(".")
181
+
182
+ @host.setter
183
+ def host(self, value: str) -> None:
184
+ """
185
+ Setter for the `host` property.
186
+
187
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
188
+ only uses `host`, and it seems reasonable that other libraries follow suit.
189
+ """
190
+ self._dns_host = value
191
+
192
+ def _new_conn(self) -> socket.socket:
193
+ """Establish a socket connection and set nodelay settings on it.
194
+
195
+ :return: New socket connection.
196
+ """
197
+ try:
198
+ sock = connection.create_connection(
199
+ (self._dns_host, self.port),
200
+ self.timeout,
201
+ source_address=self.source_address,
202
+ socket_options=self.socket_options,
203
+ )
204
+ except socket.gaierror as e:
205
+ raise NameResolutionError(self.host, self, e) from e
206
+ except SocketTimeout as e:
207
+ raise ConnectTimeoutError(
208
+ self,
209
+ f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
210
+ ) from e
211
+
212
+ except OSError as e:
213
+ raise NewConnectionError(
214
+ self, f"Failed to establish a new connection: {e}"
215
+ ) from e
216
+
217
+ # Audit hooks are only available in Python 3.8+
218
+ if _HAS_SYS_AUDIT:
219
+ sys.audit("http.client.connect", self, self.host, self.port)
220
+
221
+ return sock
222
+
223
+ def set_tunnel(
224
+ self,
225
+ host: str,
226
+ port: int | None = None,
227
+ headers: typing.Mapping[str, str] | None = None,
228
+ scheme: str = "http",
229
+ ) -> None:
230
+ if scheme not in ("http", "https"):
231
+ raise ValueError(
232
+ f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'"
233
+ )
234
+ super().set_tunnel(host, port=port, headers=headers)
235
+ self._tunnel_scheme = scheme
236
+
237
+ def connect(self) -> None:
238
+ self.sock = self._new_conn()
239
+ if self._tunnel_host:
240
+ # If we're tunneling it means we're connected to our proxy.
241
+ self._has_connected_to_proxy = True
242
+
243
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
244
+ self._tunnel() # type: ignore[attr-defined]
245
+
246
+ # If there's a proxy to be connected to we are fully connected.
247
+ # This is set twice (once above and here) due to forwarding proxies
248
+ # not using tunnelling.
249
+ self._has_connected_to_proxy = bool(self.proxy)
250
+
251
+ if self._has_connected_to_proxy:
252
+ self.proxy_is_verified = False
253
+
254
+ @property
255
+ def is_closed(self) -> bool:
256
+ return self.sock is None
257
+
258
+ @property
259
+ def is_connected(self) -> bool:
260
+ if self.sock is None:
261
+ return False
262
+ return not wait_for_read(self.sock, timeout=0.0)
263
+
264
+ @property
265
+ def has_connected_to_proxy(self) -> bool:
266
+ return self._has_connected_to_proxy
267
+
268
+ @property
269
+ def proxy_is_forwarding(self) -> bool:
270
+ """
271
+ Return True if a forwarding proxy is configured, else return False
272
+ """
273
+ return bool(self.proxy) and self._tunnel_host is None
274
+
275
+ def close(self) -> None:
276
+ try:
277
+ super().close()
278
+ finally:
279
+ # Reset all stateful properties so connection
280
+ # can be re-used without leaking prior configs.
281
+ self.sock = None
282
+ self.is_verified = False
283
+ self.proxy_is_verified = None
284
+ self._has_connected_to_proxy = False
285
+ self._response_options = None
286
+ self._tunnel_host = None
287
+ self._tunnel_port = None
288
+ self._tunnel_scheme = None
289
+
290
+ def putrequest(
291
+ self,
292
+ method: str,
293
+ url: str,
294
+ skip_host: bool = False,
295
+ skip_accept_encoding: bool = False,
296
+ ) -> None:
297
+ """"""
298
+ # Empty docstring because the indentation of CPython's implementation
299
+ # is broken but we don't want this method in our documentation.
300
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
301
+ if match:
302
+ raise ValueError(
303
+ f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})"
304
+ )
305
+
306
+ return super().putrequest(
307
+ method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding
308
+ )
309
+
310
+ def putheader(self, header: str, *values: str) -> None: # type: ignore[override]
311
+ """"""
312
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
313
+ super().putheader(header, *values)
314
+ elif to_str(header.lower()) not in SKIPPABLE_HEADERS:
315
+ skippable_headers = "', '".join(
316
+ [str.title(header) for header in sorted(SKIPPABLE_HEADERS)]
317
+ )
318
+ raise ValueError(
319
+ f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'"
320
+ )
321
+
322
+ # `request` method's signature intentionally violates LSP.
323
+ # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental.
324
+ def request( # type: ignore[override]
325
+ self,
326
+ method: str,
327
+ url: str,
328
+ body: _TYPE_BODY | None = None,
329
+ headers: typing.Mapping[str, str] | None = None,
330
+ *,
331
+ chunked: bool = False,
332
+ preload_content: bool = True,
333
+ decode_content: bool = True,
334
+ enforce_content_length: bool = True,
335
+ ) -> None:
336
+ # Update the inner socket's timeout value to send the request.
337
+ # This only triggers if the connection is re-used.
338
+ if self.sock is not None:
339
+ self.sock.settimeout(self.timeout)
340
+
341
+ # Store these values to be fed into the HTTPResponse
342
+ # object later. TODO: Remove this in favor of a real
343
+ # HTTP lifecycle mechanism.
344
+
345
+ # We have to store these before we call .request()
346
+ # because sometimes we can still salvage a response
347
+ # off the wire even if we aren't able to completely
348
+ # send the request body.
349
+ self._response_options = _ResponseOptions(
350
+ request_method=method,
351
+ request_url=url,
352
+ preload_content=preload_content,
353
+ decode_content=decode_content,
354
+ enforce_content_length=enforce_content_length,
355
+ )
356
+
357
+ if headers is None:
358
+ headers = {}
359
+ header_keys = frozenset(to_str(k.lower()) for k in headers)
360
+ skip_accept_encoding = "accept-encoding" in header_keys
361
+ skip_host = "host" in header_keys
362
+ self.putrequest(
363
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
364
+ )
365
+
366
+ # Transform the body into an iterable of sendall()-able chunks
367
+ # and detect if an explicit Content-Length is doable.
368
+ chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize)
369
+ chunks = chunks_and_cl.chunks
370
+ content_length = chunks_and_cl.content_length
371
+
372
+ # When chunked is explicitly set to 'True' we respect that.
373
+ if chunked:
374
+ if "transfer-encoding" not in header_keys:
375
+ self.putheader("Transfer-Encoding", "chunked")
376
+ else:
377
+ # Detect whether a framing mechanism is already in use. If so
378
+ # we respect that value, otherwise we pick chunked vs content-length
379
+ # depending on the type of 'body'.
380
+ if "content-length" in header_keys:
381
+ chunked = False
382
+ elif "transfer-encoding" in header_keys:
383
+ chunked = True
384
+
385
+ # Otherwise we go off the recommendation of 'body_to_chunks()'.
386
+ else:
387
+ chunked = False
388
+ if content_length is None:
389
+ if chunks is not None:
390
+ chunked = True
391
+ self.putheader("Transfer-Encoding", "chunked")
392
+ else:
393
+ self.putheader("Content-Length", str(content_length))
394
+
395
+ # Now that framing headers are out of the way we send all the other headers.
396
+ if "user-agent" not in header_keys:
397
+ self.putheader("User-Agent", _get_default_user_agent())
398
+ for header, value in headers.items():
399
+ self.putheader(header, value)
400
+ self.endheaders()
401
+
402
+ # If we're given a body we start sending that in chunks.
403
+ if chunks is not None:
404
+ for chunk in chunks:
405
+ # Sending empty chunks isn't allowed for TE: chunked
406
+ # as it indicates the end of the body.
407
+ if not chunk:
408
+ continue
409
+ if isinstance(chunk, str):
410
+ chunk = chunk.encode("utf-8")
411
+ if chunked:
412
+ self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk))
413
+ else:
414
+ self.send(chunk)
415
+
416
+ # Regardless of whether we have a body or not, if we're in
417
+ # chunked mode we want to send an explicit empty chunk.
418
+ if chunked:
419
+ self.send(b"0\r\n\r\n")
420
+
421
+ def request_chunked(
422
+ self,
423
+ method: str,
424
+ url: str,
425
+ body: _TYPE_BODY | None = None,
426
+ headers: typing.Mapping[str, str] | None = None,
427
+ ) -> None:
428
+ """
429
+ Alternative to the common request method, which sends the
430
+ body with chunked encoding and not as one block
431
+ """
432
+ warnings.warn(
433
+ "HTTPConnection.request_chunked() is deprecated and will be removed "
434
+ "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).",
435
+ category=DeprecationWarning,
436
+ stacklevel=2,
437
+ )
438
+ self.request(method, url, body=body, headers=headers, chunked=True)
439
+
440
+ def getresponse( # type: ignore[override]
441
+ self,
442
+ ) -> HTTPResponse:
443
+ """
444
+ Get the response from the server.
445
+
446
+ If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable.
447
+
448
+ If a request has not been sent or if a previous response has not been handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed.
449
+ """
450
+ # Raise the same error as http.client.HTTPConnection
451
+ if self._response_options is None:
452
+ raise ResponseNotReady()
453
+
454
+ # Reset this attribute for being used again.
455
+ resp_options = self._response_options
456
+ self._response_options = None
457
+
458
+ # Since the connection's timeout value may have been updated
459
+ # we need to set the timeout on the socket.
460
+ self.sock.settimeout(self.timeout)
461
+
462
+ # This is needed here to avoid circular import errors
463
+ from .response import HTTPResponse
464
+
465
+ # Get the response from http.client.HTTPConnection
466
+ httplib_response = super().getresponse()
467
+
468
+ try:
469
+ assert_header_parsing(httplib_response.msg)
470
+ except (HeaderParsingError, TypeError) as hpe:
471
+ log.warning(
472
+ "Failed to parse headers (url=%s): %s",
473
+ _url_from_connection(self, resp_options.request_url),
474
+ hpe,
475
+ exc_info=True,
476
+ )
477
+
478
+ headers = HTTPHeaderDict(httplib_response.msg.items())
479
+
480
+ response = HTTPResponse(
481
+ body=httplib_response,
482
+ headers=headers,
483
+ status=httplib_response.status,
484
+ version=httplib_response.version,
485
+ reason=httplib_response.reason,
486
+ preload_content=resp_options.preload_content,
487
+ decode_content=resp_options.decode_content,
488
+ original_response=httplib_response,
489
+ enforce_content_length=resp_options.enforce_content_length,
490
+ request_method=resp_options.request_method,
491
+ request_url=resp_options.request_url,
492
+ )
493
+ return response
494
+
495
+
496
+ class HTTPSConnection(HTTPConnection):
497
+ """
498
+ Many of the parameters to this constructor are passed to the underlying SSL
499
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
500
+ """
501
+
502
+ default_port = port_by_scheme["https"] # type: ignore[misc]
503
+
504
+ cert_reqs: int | str | None = None
505
+ ca_certs: str | None = None
506
+ ca_cert_dir: str | None = None
507
+ ca_cert_data: None | str | bytes = None
508
+ ssl_version: int | str | None = None
509
+ ssl_minimum_version: int | None = None
510
+ ssl_maximum_version: int | None = None
511
+ assert_fingerprint: str | None = None
512
+
513
+ def __init__(
514
+ self,
515
+ host: str,
516
+ port: int | None = None,
517
+ *,
518
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
519
+ source_address: tuple[str, int] | None = None,
520
+ blocksize: int = 16384,
521
+ socket_options: None
522
+ | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options,
523
+ proxy: Url | None = None,
524
+ proxy_config: ProxyConfig | None = None,
525
+ cert_reqs: int | str | None = None,
526
+ assert_hostname: None | str | Literal[False] = None,
527
+ assert_fingerprint: str | None = None,
528
+ server_hostname: str | None = None,
529
+ ssl_context: ssl.SSLContext | None = None,
530
+ ca_certs: str | None = None,
531
+ ca_cert_dir: str | None = None,
532
+ ca_cert_data: None | str | bytes = None,
533
+ ssl_minimum_version: int | None = None,
534
+ ssl_maximum_version: int | None = None,
535
+ ssl_version: int | str | None = None, # Deprecated
536
+ cert_file: str | None = None,
537
+ key_file: str | None = None,
538
+ key_password: str | None = None,
539
+ ) -> None:
540
+ super().__init__(
541
+ host,
542
+ port=port,
543
+ timeout=timeout,
544
+ source_address=source_address,
545
+ blocksize=blocksize,
546
+ socket_options=socket_options,
547
+ proxy=proxy,
548
+ proxy_config=proxy_config,
549
+ )
550
+
551
+ self.key_file = key_file
552
+ self.cert_file = cert_file
553
+ self.key_password = key_password
554
+ self.ssl_context = ssl_context
555
+ self.server_hostname = server_hostname
556
+ self.assert_hostname = assert_hostname
557
+ self.assert_fingerprint = assert_fingerprint
558
+ self.ssl_version = ssl_version
559
+ self.ssl_minimum_version = ssl_minimum_version
560
+ self.ssl_maximum_version = ssl_maximum_version
561
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
562
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
563
+ self.ca_cert_data = ca_cert_data
564
+
565
+ # cert_reqs depends on ssl_context so calculate last.
566
+ if cert_reqs is None:
567
+ if self.ssl_context is not None:
568
+ cert_reqs = self.ssl_context.verify_mode
569
+ else:
570
+ cert_reqs = resolve_cert_reqs(None)
571
+ self.cert_reqs = cert_reqs
572
+
573
+ def set_cert(
574
+ self,
575
+ key_file: str | None = None,
576
+ cert_file: str | None = None,
577
+ cert_reqs: int | str | None = None,
578
+ key_password: str | None = None,
579
+ ca_certs: str | None = None,
580
+ assert_hostname: None | str | Literal[False] = None,
581
+ assert_fingerprint: str | None = None,
582
+ ca_cert_dir: str | None = None,
583
+ ca_cert_data: None | str | bytes = None,
584
+ ) -> None:
585
+ """
586
+ This method should only be called once, before the connection is used.
587
+ """
588
+ warnings.warn(
589
+ "HTTPSConnection.set_cert() is deprecated and will be removed "
590
+ "in urllib3 v2.1.0. Instead provide the parameters to the "
591
+ "HTTPSConnection constructor.",
592
+ category=DeprecationWarning,
593
+ stacklevel=2,
594
+ )
595
+
596
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
597
+ # have an SSLContext object in which case we'll use its verify_mode.
598
+ if cert_reqs is None:
599
+ if self.ssl_context is not None:
600
+ cert_reqs = self.ssl_context.verify_mode
601
+ else:
602
+ cert_reqs = resolve_cert_reqs(None)
603
+
604
+ self.key_file = key_file
605
+ self.cert_file = cert_file
606
+ self.cert_reqs = cert_reqs
607
+ self.key_password = key_password
608
+ self.assert_hostname = assert_hostname
609
+ self.assert_fingerprint = assert_fingerprint
610
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
611
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
612
+ self.ca_cert_data = ca_cert_data
613
+
614
+ def connect(self) -> None:
615
+ sock: socket.socket | ssl.SSLSocket
616
+ self.sock = sock = self._new_conn()
617
+ server_hostname: str = self.host
618
+ tls_in_tls = False
619
+
620
+ # Do we need to establish a tunnel?
621
+ if self._tunnel_host is not None:
622
+ # We're tunneling to an HTTPS origin so need to do TLS-in-TLS.
623
+ if self._tunnel_scheme == "https":
624
+ # _connect_tls_proxy will verify and assign proxy_is_verified
625
+ self.sock = sock = self._connect_tls_proxy(self.host, sock)
626
+ tls_in_tls = True
627
+ elif self._tunnel_scheme == "http":
628
+ self.proxy_is_verified = False
629
+
630
+ # If we're tunneling it means we're connected to our proxy.
631
+ self._has_connected_to_proxy = True
632
+
633
+ self._tunnel() # type: ignore[attr-defined]
634
+ # Override the host with the one we're requesting data from.
635
+ server_hostname = self._tunnel_host
636
+
637
+ if self.server_hostname is not None:
638
+ server_hostname = self.server_hostname
639
+
640
+ is_time_off = datetime.date.today() < RECENT_DATE
641
+ if is_time_off:
642
+ warnings.warn(
643
+ (
644
+ f"System time is way off (before {RECENT_DATE}). This will probably "
645
+ "lead to SSL verification errors"
646
+ ),
647
+ SystemTimeWarning,
648
+ )
649
+
650
+ # Remove trailing '.' from fqdn hostnames to allow certificate validation
651
+ server_hostname_rm_dot = server_hostname.rstrip(".")
652
+
653
+ sock_and_verified = _ssl_wrap_socket_and_match_hostname(
654
+ sock=sock,
655
+ cert_reqs=self.cert_reqs,
656
+ ssl_version=self.ssl_version,
657
+ ssl_minimum_version=self.ssl_minimum_version,
658
+ ssl_maximum_version=self.ssl_maximum_version,
659
+ ca_certs=self.ca_certs,
660
+ ca_cert_dir=self.ca_cert_dir,
661
+ ca_cert_data=self.ca_cert_data,
662
+ cert_file=self.cert_file,
663
+ key_file=self.key_file,
664
+ key_password=self.key_password,
665
+ server_hostname=server_hostname_rm_dot,
666
+ ssl_context=self.ssl_context,
667
+ tls_in_tls=tls_in_tls,
668
+ assert_hostname=self.assert_hostname,
669
+ assert_fingerprint=self.assert_fingerprint,
670
+ )
671
+ self.sock = sock_and_verified.socket
672
+
673
+ # Forwarding proxies can never have a verified target since
674
+ # the proxy is the one doing the verification. Should instead
675
+ # use a CONNECT tunnel in order to verify the target.
676
+ # See: https://github.com/urllib3/urllib3/issues/3267.
677
+ if self.proxy_is_forwarding:
678
+ self.is_verified = False
679
+ else:
680
+ self.is_verified = sock_and_verified.is_verified
681
+
682
+ # If there's a proxy to be connected to we are fully connected.
683
+ # This is set twice (once above and here) due to forwarding proxies
684
+ # not using tunnelling.
685
+ self._has_connected_to_proxy = bool(self.proxy)
686
+
687
+ # Set `self.proxy_is_verified` unless it's already set while
688
+ # establishing a tunnel.
689
+ if self._has_connected_to_proxy and self.proxy_is_verified is None:
690
+ self.proxy_is_verified = sock_and_verified.is_verified
691
+
692
+ def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket:
693
+ """
694
+ Establish a TLS connection to the proxy using the provided SSL context.
695
+ """
696
+ # `_connect_tls_proxy` is called when self._tunnel_host is truthy.
697
+ proxy_config = typing.cast(ProxyConfig, self.proxy_config)
698
+ ssl_context = proxy_config.ssl_context
699
+ sock_and_verified = _ssl_wrap_socket_and_match_hostname(
700
+ sock,
701
+ cert_reqs=self.cert_reqs,
702
+ ssl_version=self.ssl_version,
703
+ ssl_minimum_version=self.ssl_minimum_version,
704
+ ssl_maximum_version=self.ssl_maximum_version,
705
+ ca_certs=self.ca_certs,
706
+ ca_cert_dir=self.ca_cert_dir,
707
+ ca_cert_data=self.ca_cert_data,
708
+ server_hostname=hostname,
709
+ ssl_context=ssl_context,
710
+ assert_hostname=proxy_config.assert_hostname,
711
+ assert_fingerprint=proxy_config.assert_fingerprint,
712
+ # Features that aren't implemented for proxies yet:
713
+ cert_file=None,
714
+ key_file=None,
715
+ key_password=None,
716
+ tls_in_tls=False,
717
+ )
718
+ self.proxy_is_verified = sock_and_verified.is_verified
719
+ return sock_and_verified.socket # type: ignore[return-value]
720
+
721
+
722
+ class _WrappedAndVerifiedSocket(typing.NamedTuple):
723
+ """
724
+ Wrapped socket and whether the connection is
725
+ verified after the TLS handshake
726
+ """
727
+
728
+ socket: ssl.SSLSocket | SSLTransport
729
+ is_verified: bool
730
+
731
+
732
+ def _ssl_wrap_socket_and_match_hostname(
733
+ sock: socket.socket,
734
+ *,
735
+ cert_reqs: None | str | int,
736
+ ssl_version: None | str | int,
737
+ ssl_minimum_version: int | None,
738
+ ssl_maximum_version: int | None,
739
+ cert_file: str | None,
740
+ key_file: str | None,
741
+ key_password: str | None,
742
+ ca_certs: str | None,
743
+ ca_cert_dir: str | None,
744
+ ca_cert_data: None | str | bytes,
745
+ assert_hostname: None | str | Literal[False],
746
+ assert_fingerprint: str | None,
747
+ server_hostname: str | None,
748
+ ssl_context: ssl.SSLContext | None,
749
+ tls_in_tls: bool = False,
750
+ ) -> _WrappedAndVerifiedSocket:
751
+ """Logic for constructing an SSLContext from all TLS parameters, passing
752
+ that down into ssl_wrap_socket, and then doing certificate verification
753
+ either via hostname or fingerprint. This function exists to guarantee
754
+ that both proxies and targets have the same behavior when connecting via TLS.
755
+ """
756
+ default_ssl_context = False
757
+ if ssl_context is None:
758
+ default_ssl_context = True
759
+ context = create_urllib3_context(
760
+ ssl_version=resolve_ssl_version(ssl_version),
761
+ ssl_minimum_version=ssl_minimum_version,
762
+ ssl_maximum_version=ssl_maximum_version,
763
+ cert_reqs=resolve_cert_reqs(cert_reqs),
764
+ )
765
+ else:
766
+ context = ssl_context
767
+
768
+ context.verify_mode = resolve_cert_reqs(cert_reqs)
769
+
770
+ # In some cases, we want to verify hostnames ourselves
771
+ if (
772
+ # `ssl` can't verify fingerprints or alternate hostnames
773
+ assert_fingerprint
774
+ or assert_hostname
775
+ # assert_hostname can be set to False to disable hostname checking
776
+ or assert_hostname is False
777
+ # We still support OpenSSL 1.0.2, which prevents us from verifying
778
+ # hostnames easily: https://github.com/pyca/pyopenssl/pull/933
779
+ or ssl_.IS_PYOPENSSL
780
+ or not ssl_.HAS_NEVER_CHECK_COMMON_NAME
781
+ ):
782
+ context.check_hostname = False
783
+
784
+ # Try to load OS default certs if none are given. We need to do the hasattr() check
785
+ # for custom pyOpenSSL SSLContext objects because they don't support
786
+ # load_default_certs().
787
+ if (
788
+ not ca_certs
789
+ and not ca_cert_dir
790
+ and not ca_cert_data
791
+ and default_ssl_context
792
+ and hasattr(context, "load_default_certs")
793
+ ):
794
+ context.load_default_certs()
795
+
796
+ # Ensure that IPv6 addresses are in the proper format and don't have a
797
+ # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses
798
+ # and interprets them as DNS hostnames.
799
+ if server_hostname is not None:
800
+ normalized = server_hostname.strip("[]")
801
+ if "%" in normalized:
802
+ normalized = normalized[: normalized.rfind("%")]
803
+ if is_ipaddress(normalized):
804
+ server_hostname = normalized
805
+
806
+ ssl_sock = ssl_wrap_socket(
807
+ sock=sock,
808
+ keyfile=key_file,
809
+ certfile=cert_file,
810
+ key_password=key_password,
811
+ ca_certs=ca_certs,
812
+ ca_cert_dir=ca_cert_dir,
813
+ ca_cert_data=ca_cert_data,
814
+ server_hostname=server_hostname,
815
+ ssl_context=context,
816
+ tls_in_tls=tls_in_tls,
817
+ )
818
+
819
+ try:
820
+ if assert_fingerprint:
821
+ _assert_fingerprint(
822
+ ssl_sock.getpeercert(binary_form=True), assert_fingerprint
823
+ )
824
+ elif (
825
+ context.verify_mode != ssl.CERT_NONE
826
+ and not context.check_hostname
827
+ and assert_hostname is not False
828
+ ):
829
+ cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment]
830
+
831
+ # Need to signal to our match_hostname whether to use 'commonName' or not.
832
+ # If we're using our own constructed SSLContext we explicitly set 'False'
833
+ # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
834
+ if default_ssl_context:
835
+ hostname_checks_common_name = False
836
+ else:
837
+ hostname_checks_common_name = (
838
+ getattr(context, "hostname_checks_common_name", False) or False
839
+ )
840
+
841
+ _match_hostname(
842
+ cert,
843
+ assert_hostname or server_hostname, # type: ignore[arg-type]
844
+ hostname_checks_common_name,
845
+ )
846
+
847
+ return _WrappedAndVerifiedSocket(
848
+ socket=ssl_sock,
849
+ is_verified=context.verify_mode == ssl.CERT_REQUIRED
850
+ or bool(assert_fingerprint),
851
+ )
852
+ except BaseException:
853
+ ssl_sock.close()
854
+ raise
855
+
856
+
857
+ def _match_hostname(
858
+ cert: _TYPE_PEER_CERT_RET_DICT | None,
859
+ asserted_hostname: str,
860
+ hostname_checks_common_name: bool = False,
861
+ ) -> None:
862
+ # Our upstream implementation of ssl.match_hostname()
863
+ # only applies this normalization to IP addresses (it is not applied
864
+ # to DNS SANs), so we do the same thing!
865
+ stripped_hostname = asserted_hostname.strip("[]")
866
+ if is_ipaddress(stripped_hostname):
867
+ asserted_hostname = stripped_hostname
868
+
869
+ try:
870
+ match_hostname(cert, asserted_hostname, hostname_checks_common_name)
871
+ except CertificateError as e:
872
+ log.warning(
873
+ "Certificate did not match expected hostname: %s. Certificate: %s",
874
+ asserted_hostname,
875
+ cert,
876
+ )
877
+ # Add cert to exception and reraise so client code can inspect
878
+ # the cert when catching the exception, if they want to
879
+ e._peer_cert = cert # type: ignore[attr-defined]
880
+ raise
881
+
882
+
883
+ def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError:
884
+ # Look for the phrase 'wrong version number', if found
885
+ # then we should warn the user that we're very sure that
886
+ # this proxy is HTTP-only and they have a configuration issue.
887
+ error_normalized = " ".join(re.split("[^a-z]", str(err).lower()))
888
+ is_likely_http_proxy = (
889
+ "wrong version number" in error_normalized
890
+ or "unknown protocol" in error_normalized
891
+ or "record layer failure" in error_normalized
892
+ )
893
+ http_proxy_warning = (
894
+ ". Your proxy appears to only use HTTP and not HTTPS, "
895
+ "try changing your proxy URL to be HTTP. See: "
896
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
897
+ "#https-proxy-error-http-proxy"
898
+ )
899
+ new_err = ProxyError(
900
+ f"Unable to connect to proxy"
901
+ f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}",
902
+ err,
903
+ )
904
+ new_err.__cause__ = err
905
+ return new_err
906
+
907
+
908
+ def _get_default_user_agent() -> str:
909
+ return f"python-urllib3/{__version__}"
910
+
911
+
912
+ class DummyConnection:
913
+ """Used to detect a failed ConnectionCls import."""
914
+
915
+
916
+ if not ssl:
917
+ HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811
918
+
919
+
920
+ VerifiedHTTPSConnection = HTTPSConnection
921
+
922
+
923
+ def _url_from_connection(
924
+ conn: HTTPConnection | HTTPSConnection, path: str | None = None
925
+ ) -> str:
926
+ """Returns the URL from a given connection. This is mainly used for testing and logging."""
927
+
928
+ scheme = "https" if isinstance(conn, HTTPSConnection) else "http"
929
+
930
+ return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url
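As a quick orientation before the pool layer below, a minimal, hedged sketch (not part of the diff) of driving `HTTPSConnection` directly; in practice the connection-pool classes in the next file are the intended entry point, and `example.com` is just a placeholder host:

```python
from urllib3.connection import HTTPSConnection

conn = HTTPSConnection("example.com", 443, timeout=10.0)
try:
    # request() chooses the framing (Content-Length vs. chunked) and adds default headers.
    conn.request("GET", "/", headers={"User-Agent": "demo/1.0"})
    # getresponse() wraps http.client's response in urllib3's HTTPResponse.
    resp = conn.getresponse()
    print(resp.status, resp.headers.get("Content-Type"), len(resp.data))
finally:
    conn.close()
```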
venv/lib/python3.10/site-packages/urllib3/connectionpool.py ADDED
@@ -0,0 +1,1186 @@
1
+ from __future__ import annotations
2
+
3
+ import errno
4
+ import logging
5
+ import queue
6
+ import sys
7
+ import typing
8
+ import warnings
9
+ import weakref
10
+ from socket import timeout as SocketTimeout
11
+ from types import TracebackType
12
+
13
+ from ._base_connection import _TYPE_BODY
14
+ from ._collections import HTTPHeaderDict
15
+ from ._request_methods import RequestMethods
16
+ from .connection import (
17
+ BaseSSLError,
18
+ BrokenPipeError,
19
+ DummyConnection,
20
+ HTTPConnection,
21
+ HTTPException,
22
+ HTTPSConnection,
23
+ ProxyConfig,
24
+ _wrap_proxy_error,
25
+ )
26
+ from .connection import port_by_scheme as port_by_scheme
27
+ from .exceptions import (
28
+ ClosedPoolError,
29
+ EmptyPoolError,
30
+ FullPoolError,
31
+ HostChangedError,
32
+ InsecureRequestWarning,
33
+ LocationValueError,
34
+ MaxRetryError,
35
+ NewConnectionError,
36
+ ProtocolError,
37
+ ProxyError,
38
+ ReadTimeoutError,
39
+ SSLError,
40
+ TimeoutError,
41
+ )
42
+ from .response import BaseHTTPResponse
43
+ from .util.connection import is_connection_dropped
44
+ from .util.proxy import connection_requires_http_tunnel
45
+ from .util.request import _TYPE_BODY_POSITION, set_file_position
46
+ from .util.retry import Retry
47
+ from .util.ssl_match_hostname import CertificateError
48
+ from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
49
+ from .util.url import Url, _encode_target
50
+ from .util.url import _normalize_host as normalize_host
51
+ from .util.url import parse_url
52
+ from .util.util import to_str
53
+
54
+ if typing.TYPE_CHECKING:
55
+ import ssl
56
+ from typing import Literal
57
+
58
+ from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection
59
+
60
+ log = logging.getLogger(__name__)
61
+
62
+ _TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]
63
+
64
+ _SelfT = typing.TypeVar("_SelfT")
65
+
66
+
67
+ # Pool objects
68
+ class ConnectionPool:
69
+ """
70
+ Base class for all connection pools, such as
71
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
72
+
73
+ .. note::
74
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs
75
+ which is useful if your target server doesn't support percent-encoded
76
+ target URIs.
77
+ """
78
+
79
+ scheme: str | None = None
80
+ QueueCls = queue.LifoQueue
81
+
82
+ def __init__(self, host: str, port: int | None = None) -> None:
83
+ if not host:
84
+ raise LocationValueError("No host specified.")
85
+
86
+ self.host = _normalize_host(host, scheme=self.scheme)
87
+ self.port = port
88
+
89
+ # This property uses 'normalize_host()' (not '_normalize_host()')
90
+ # to avoid removing square braces around IPv6 addresses.
91
+ # This value is sent to `HTTPConnection.set_tunnel()` if called
92
+ # because square braces are required for HTTP CONNECT tunneling.
93
+ self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()
94
+
95
+ def __str__(self) -> str:
96
+ return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})"
97
+
98
+ def __enter__(self: _SelfT) -> _SelfT:
99
+ return self
100
+
101
+ def __exit__(
102
+ self,
103
+ exc_type: type[BaseException] | None,
104
+ exc_val: BaseException | None,
105
+ exc_tb: TracebackType | None,
106
+ ) -> Literal[False]:
107
+ self.close()
108
+ # Return False to re-raise any potential exceptions
109
+ return False
110
+
111
+ def close(self) -> None:
112
+ """
113
+ Close all pooled connections and disable the pool.
114
+ """
115
+
116
+
117
+ # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
118
+ _blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
119
+
120
+
121
+ class HTTPConnectionPool(ConnectionPool, RequestMethods):
122
+ """
123
+ Thread-safe connection pool for one host.
124
+
125
+ :param host:
126
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
127
+ :class:`http.client.HTTPConnection`.
128
+
129
+ :param port:
130
+ Port used for this HTTP Connection (None is equivalent to 80), passed
131
+ into :class:`http.client.HTTPConnection`.
132
+
133
+ :param timeout:
134
+ Socket timeout in seconds for each individual connection. This can
135
+ be a float or integer, which sets the timeout for the HTTP request,
136
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
137
+ fine-grained control over request timeouts. After the constructor has
138
+ been parsed, this is always a `urllib3.util.Timeout` object.
139
+
140
+ :param maxsize:
141
+ Number of connections to save that can be reused. More than 1 is useful
142
+ in multithreaded situations. If ``block`` is set to False, more
143
+ connections will be created but they will not be saved once they've
144
+ been used.
145
+
146
+ :param block:
147
+ If set to True, no more than ``maxsize`` connections will be used at
148
+ a time. When no free connections are available, the call will block
149
+ until a connection has been released. This is a useful side effect for
150
+ particular multithreaded situations where one does not want to use more
151
+ than maxsize connections per host to prevent flooding.
152
+
153
+ :param headers:
154
+ Headers to include with all requests, unless other headers are given
155
+ explicitly.
156
+
157
+ :param retries:
158
+ Retry configuration to use by default with requests in this pool.
159
+
160
+ :param _proxy:
161
+ Parsed proxy URL, should not be used directly, instead, see
162
+ :class:`urllib3.ProxyManager`
163
+
164
+ :param _proxy_headers:
165
+ A dictionary with proxy headers, should not be used directly,
166
+ instead, see :class:`urllib3.ProxyManager`
167
+
168
+ :param \\**conn_kw:
169
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
170
+ :class:`urllib3.connection.HTTPSConnection` instances.
171
+ """
172
+
173
+ scheme = "http"
174
+ ConnectionCls: (
175
+ type[BaseHTTPConnection] | type[BaseHTTPSConnection]
176
+ ) = HTTPConnection
177
+
178
+ def __init__(
179
+ self,
180
+ host: str,
181
+ port: int | None = None,
182
+ timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
183
+ maxsize: int = 1,
184
+ block: bool = False,
185
+ headers: typing.Mapping[str, str] | None = None,
186
+ retries: Retry | bool | int | None = None,
187
+ _proxy: Url | None = None,
188
+ _proxy_headers: typing.Mapping[str, str] | None = None,
189
+ _proxy_config: ProxyConfig | None = None,
190
+ **conn_kw: typing.Any,
191
+ ):
192
+ ConnectionPool.__init__(self, host, port)
193
+ RequestMethods.__init__(self, headers)
194
+
195
+ if not isinstance(timeout, Timeout):
196
+ timeout = Timeout.from_float(timeout)
197
+
198
+ if retries is None:
199
+ retries = Retry.DEFAULT
200
+
201
+ self.timeout = timeout
202
+ self.retries = retries
203
+
204
+ self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
205
+ self.block = block
206
+
207
+ self.proxy = _proxy
208
+ self.proxy_headers = _proxy_headers or {}
209
+ self.proxy_config = _proxy_config
210
+
211
+ # Fill the queue up so that doing get() on it will block properly
212
+ for _ in range(maxsize):
213
+ self.pool.put(None)
214
+
215
+ # These are mostly for testing and debugging purposes.
216
+ self.num_connections = 0
217
+ self.num_requests = 0
218
+ self.conn_kw = conn_kw
219
+
220
+ if self.proxy:
221
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
222
+ # We cannot know if the user has added default socket options, so we cannot replace the
223
+ # list.
224
+ self.conn_kw.setdefault("socket_options", [])
225
+
226
+ self.conn_kw["proxy"] = self.proxy
227
+ self.conn_kw["proxy_config"] = self.proxy_config
228
+
229
+ # Do not pass 'self' as callback to 'finalize'.
230
+ # That would keep a reference to self alive forever (a leak).
231
+ # Passing only a reference to the pool lets the garbage collector
232
+ # free self once nobody else holds a reference to it.
233
+ pool = self.pool
234
+
235
+ # Close all the HTTPConnections in the pool before the
236
+ # HTTPConnectionPool object is garbage collected.
237
+ weakref.finalize(self, _close_pool_connections, pool)
238
+
239
+ def _new_conn(self) -> BaseHTTPConnection:
240
+ """
241
+ Return a fresh :class:`HTTPConnection`.
242
+ """
243
+ self.num_connections += 1
244
+ log.debug(
245
+ "Starting new HTTP connection (%d): %s:%s",
246
+ self.num_connections,
247
+ self.host,
248
+ self.port or "80",
249
+ )
250
+
251
+ conn = self.ConnectionCls(
252
+ host=self.host,
253
+ port=self.port,
254
+ timeout=self.timeout.connect_timeout,
255
+ **self.conn_kw,
256
+ )
257
+ return conn
258
+
259
+ def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection:
260
+ """
261
+ Get a connection. Will return a pooled connection if one is available.
262
+
263
+ If no connections are available and :prop:`.block` is ``False``, then a
264
+ fresh connection is returned.
265
+
266
+ :param timeout:
267
+ Seconds to wait before giving up and raising
268
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
269
+ :prop:`.block` is ``True``.
270
+ """
271
+ conn = None
272
+
273
+ if self.pool is None:
274
+ raise ClosedPoolError(self, "Pool is closed.")
275
+
276
+ try:
277
+ conn = self.pool.get(block=self.block, timeout=timeout)
278
+
279
+ except AttributeError: # self.pool is None
280
+ raise ClosedPoolError(self, "Pool is closed.") from None # Defensive:
281
+
282
+ except queue.Empty:
283
+ if self.block:
284
+ raise EmptyPoolError(
285
+ self,
286
+ "Pool is empty and a new connection can't be opened due to blocking mode.",
287
+ ) from None
288
+ pass # Oh well, we'll create a new connection then
289
+
290
+ # If this is a persistent connection, check if it got disconnected
291
+ if conn and is_connection_dropped(conn):
292
+ log.debug("Resetting dropped connection: %s", self.host)
293
+ conn.close()
294
+
295
+ return conn or self._new_conn()
296
+
297
+ def _put_conn(self, conn: BaseHTTPConnection | None) -> None:
298
+ """
299
+ Put a connection back into the pool.
300
+
301
+ :param conn:
302
+ Connection object for the current host and port as returned by
303
+ :meth:`._new_conn` or :meth:`._get_conn`.
304
+
305
+ If the pool is already full, the connection is closed and discarded
306
+ because we exceeded maxsize. If connections are discarded frequently,
307
+ then maxsize should be increased.
308
+
309
+ If the pool is closed, then the connection will be closed and discarded.
310
+ """
311
+ if self.pool is not None:
312
+ try:
313
+ self.pool.put(conn, block=False)
314
+ return # Everything is dandy, done.
315
+ except AttributeError:
316
+ # self.pool is None.
317
+ pass
318
+ except queue.Full:
319
+ # Connection never got put back into the pool, close it.
320
+ if conn:
321
+ conn.close()
322
+
323
+ if self.block:
324
+ # This should never happen if you got the conn from self._get_conn
325
+ raise FullPoolError(
326
+ self,
327
+ "Pool reached maximum size and no more connections are allowed.",
328
+ ) from None
329
+
330
+ log.warning(
331
+ "Connection pool is full, discarding connection: %s. Connection pool size: %s",
332
+ self.host,
333
+ self.pool.qsize(),
334
+ )
335
+
336
+ # Connection never got put back into the pool, close it.
337
+ if conn:
338
+ conn.close()
339
+
340
+ def _validate_conn(self, conn: BaseHTTPConnection) -> None:
341
+ """
342
+ Called right before a request is made, after the socket is created.
343
+ """
344
+
345
+ def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
346
+ # Nothing to do for HTTP connections.
347
+ pass
348
+
349
+ def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout:
350
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
351
+ if timeout is _DEFAULT_TIMEOUT:
352
+ return self.timeout.clone()
353
+
354
+ if isinstance(timeout, Timeout):
355
+ return timeout.clone()
356
+ else:
357
+ # User passed us an int/float. This is for backwards compatibility,
358
+ # can be removed later
359
+ return Timeout.from_float(timeout)
360
+
361
+ def _raise_timeout(
362
+ self,
363
+ err: BaseSSLError | OSError | SocketTimeout,
364
+ url: str,
365
+ timeout_value: _TYPE_TIMEOUT | None,
366
+ ) -> None:
367
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
368
+
369
+ if isinstance(err, SocketTimeout):
370
+ raise ReadTimeoutError(
371
+ self, url, f"Read timed out. (read timeout={timeout_value})"
372
+ ) from err
373
+
374
+ # See the above comment about EAGAIN in Python 3.
375
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
376
+ raise ReadTimeoutError(
377
+ self, url, f"Read timed out. (read timeout={timeout_value})"
378
+ ) from err
379
+
380
+ def _make_request(
381
+ self,
382
+ conn: BaseHTTPConnection,
383
+ method: str,
384
+ url: str,
385
+ body: _TYPE_BODY | None = None,
386
+ headers: typing.Mapping[str, str] | None = None,
387
+ retries: Retry | None = None,
388
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
389
+ chunked: bool = False,
390
+ response_conn: BaseHTTPConnection | None = None,
391
+ preload_content: bool = True,
392
+ decode_content: bool = True,
393
+ enforce_content_length: bool = True,
394
+ ) -> BaseHTTPResponse:
395
+ """
396
+ Perform a request on a given urllib connection object taken from our
397
+ pool.
398
+
399
+ :param conn:
400
+ a connection from one of our connection pools
401
+
402
+ :param method:
403
+ HTTP request method (such as GET, POST, PUT, etc.)
404
+
405
+ :param url:
406
+ The URL to perform the request on.
407
+
408
+ :param body:
409
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
410
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
411
+
412
+ :param headers:
413
+ Dictionary of custom headers to send, such as User-Agent,
414
+ If-None-Match, etc. If None, pool headers are used. If provided,
415
+ these headers completely replace any pool-specific headers.
416
+
417
+ :param retries:
418
+ Configure the number of retries to allow before raising a
419
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
420
+
421
+ Pass ``None`` to retry until you receive a response. Pass a
422
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
423
+ over different types of retries.
424
+ Pass an integer number to retry connection errors that many times,
425
+ but no other types of errors. Pass zero to never retry.
426
+
427
+ If ``False``, then retries are disabled and any exception is raised
428
+ immediately. Also, instead of raising a MaxRetryError on redirects,
429
+ the redirect response will be returned.
430
+
431
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
432
+
433
+ :param timeout:
434
+ If specified, overrides the default timeout for this one
435
+ request. It may be a float (in seconds) or an instance of
436
+ :class:`urllib3.util.Timeout`.
437
+
438
+ :param chunked:
439
+ If True, urllib3 will send the body using chunked transfer
440
+ encoding. Otherwise, urllib3 will send the body using the standard
441
+ content-length form. Defaults to False.
442
+
443
+ :param response_conn:
444
+ Set this to ``None`` if you will handle releasing the connection or
445
+ set the connection to have the response release it.
446
+
447
+ :param preload_content:
448
+ If True, the response's body will be preloaded during construction.
449
+
450
+ :param decode_content:
451
+ If True, will attempt to decode the body based on the
452
+ 'content-encoding' header.
453
+
454
+ :param enforce_content_length:
455
+ Enforce content length checking. Body returned by server must match
456
+ value of Content-Length header, if present. Otherwise, raise error.
457
+ """
458
+ self.num_requests += 1
459
+
460
+ timeout_obj = self._get_timeout(timeout)
461
+ timeout_obj.start_connect()
462
+ conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
463
+
464
+ try:
465
+ # Trigger any extra validation we need to do.
466
+ try:
467
+ self._validate_conn(conn)
468
+ except (SocketTimeout, BaseSSLError) as e:
469
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
470
+ raise
471
+
472
+ # _validate_conn() starts the connection to an HTTPS proxy
473
+ # so we need to wrap errors with 'ProxyError' here too.
474
+ except (
475
+ OSError,
476
+ NewConnectionError,
477
+ TimeoutError,
478
+ BaseSSLError,
479
+ CertificateError,
480
+ SSLError,
481
+ ) as e:
482
+ new_e: Exception = e
483
+ if isinstance(e, (BaseSSLError, CertificateError)):
484
+ new_e = SSLError(e)
485
+ # If the connection didn't successfully connect to its proxy,
486
+ # then the failure came from the proxy, so wrap it in a ProxyError.
487
+ if isinstance(
488
+ new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
489
+ ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
490
+ new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
491
+ raise new_e
492
+
493
+ # conn.request() calls http.client.*.request, not the method in
494
+ # urllib3.request. It also calls makefile (recv) on the socket.
495
+ try:
496
+ conn.request(
497
+ method,
498
+ url,
499
+ body=body,
500
+ headers=headers,
501
+ chunked=chunked,
502
+ preload_content=preload_content,
503
+ decode_content=decode_content,
504
+ enforce_content_length=enforce_content_length,
505
+ )
506
+
507
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
508
+ # legitimately able to close the connection after sending a valid response.
509
+ # With this behaviour, the received response is still readable.
510
+ except BrokenPipeError:
511
+ pass
512
+ except OSError as e:
513
+ # MacOS/Linux
514
+ # EPROTOTYPE and ECONNRESET are needed on macOS
515
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
516
+ # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
517
+ if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
518
+ raise
519
+
520
+ # Reset the timeout for the recv() on the socket
521
+ read_timeout = timeout_obj.read_timeout
522
+
523
+ if not conn.is_closed:
524
+ # In Python 3 socket.py will catch EAGAIN and return None when you
525
+ # try and read into the file pointer created by http.client, which
526
+ # instead raises a BadStatusLine exception. Instead of catching
527
+ # the exception and assuming all BadStatusLine exceptions are read
528
+ # timeouts, check for a zero timeout before making the request.
529
+ if read_timeout == 0:
530
+ raise ReadTimeoutError(
531
+ self, url, f"Read timed out. (read timeout={read_timeout})"
532
+ )
533
+ conn.timeout = read_timeout
534
+
535
+ # Receive the response from the server
536
+ try:
537
+ response = conn.getresponse()
538
+ except (BaseSSLError, OSError) as e:
539
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
540
+ raise
541
+
542
+ # Set properties that are used by the pooling layer.
543
+ response.retries = retries
544
+ response._connection = response_conn # type: ignore[attr-defined]
545
+ response._pool = self # type: ignore[attr-defined]
546
+
547
+ # emscripten connection doesn't have _http_vsn_str
548
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
549
+ log.debug(
550
+ '%s://%s:%s "%s %s %s" %s %s',
551
+ self.scheme,
552
+ self.host,
553
+ self.port,
554
+ method,
555
+ url,
556
+ # HTTP version
557
+ http_version,
558
+ response.status,
559
+ response.length_remaining,
560
+ )
561
+
562
+ return response
563
+
564
+ def close(self) -> None:
565
+ """
566
+ Close all pooled connections and disable the pool.
567
+ """
568
+ if self.pool is None:
569
+ return
570
+ # Disable access to the pool
571
+ old_pool, self.pool = self.pool, None
572
+
573
+ # Close all the HTTPConnections in the pool.
574
+ _close_pool_connections(old_pool)
575
+
576
+ def is_same_host(self, url: str) -> bool:
577
+ """
578
+ Check if the given ``url`` is a member of the same host as this
579
+ connection pool.
580
+ """
581
+ if url.startswith("/"):
582
+ return True
583
+
584
+ # TODO: Add optional support for socket.gethostbyname checking.
585
+ scheme, _, host, port, *_ = parse_url(url)
586
+ scheme = scheme or "http"
587
+ if host is not None:
588
+ host = _normalize_host(host, scheme=scheme)
589
+
590
+ # Use explicit default port for comparison when none is given
591
+ if self.port and not port:
592
+ port = port_by_scheme.get(scheme)
593
+ elif not self.port and port == port_by_scheme.get(scheme):
594
+ port = None
595
+
596
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
597
+
598
+ def urlopen( # type: ignore[override]
599
+ self,
600
+ method: str,
601
+ url: str,
602
+ body: _TYPE_BODY | None = None,
603
+ headers: typing.Mapping[str, str] | None = None,
604
+ retries: Retry | bool | int | None = None,
605
+ redirect: bool = True,
606
+ assert_same_host: bool = True,
607
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
608
+ pool_timeout: int | None = None,
609
+ release_conn: bool | None = None,
610
+ chunked: bool = False,
611
+ body_pos: _TYPE_BODY_POSITION | None = None,
612
+ preload_content: bool = True,
613
+ decode_content: bool = True,
614
+ **response_kw: typing.Any,
615
+ ) -> BaseHTTPResponse:
616
+ """
617
+ Get a connection from the pool and perform an HTTP request. This is the
618
+ lowest level call for making a request, so you'll need to specify all
619
+ the raw details.
620
+
621
+ .. note::
622
+
623
+ More commonly, it's appropriate to use a convenience method
624
+ such as :meth:`request`.
625
+
626
+ .. note::
627
+
628
+ `release_conn` will only behave as expected if
629
+ `preload_content=False` because we want to make
630
+ `preload_content=False` the default behaviour someday soon without
631
+ breaking backwards compatibility.
632
+
633
+ :param method:
634
+ HTTP request method (such as GET, POST, PUT, etc.)
635
+
636
+ :param url:
637
+ The URL to perform the request on.
638
+
639
+ :param body:
640
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
641
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
642
+
643
+ :param headers:
644
+ Dictionary of custom headers to send, such as User-Agent,
645
+ If-None-Match, etc. If None, pool headers are used. If provided,
646
+ these headers completely replace any pool-specific headers.
647
+
648
+ :param retries:
649
+ Configure the number of retries to allow before raising a
650
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
651
+
652
+ If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
653
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
654
+ over different types of retries.
655
+ Pass an integer number to retry connection errors that many times,
656
+ but no other types of errors. Pass zero to never retry.
657
+
658
+ If ``False``, then retries are disabled and any exception is raised
659
+ immediately. Also, instead of raising a MaxRetryError on redirects,
660
+ the redirect response will be returned.
661
+
662
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
663
+
664
+ :param redirect:
665
+ If True, automatically handle redirects (status codes 301, 302,
666
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
667
+ will disable redirect, too.
668
+
669
+ :param assert_same_host:
670
+ If ``True``, will make sure that the host of the pool's requests is
671
+ consistent, and will raise HostChangedError otherwise. When ``False``,
672
+ you can use the pool on an HTTP proxy and request foreign hosts.
673
+
674
+ :param timeout:
675
+ If specified, overrides the default timeout for this one
676
+ request. It may be a float (in seconds) or an instance of
677
+ :class:`urllib3.util.Timeout`.
678
+
679
+ :param pool_timeout:
680
+ If set and the pool is set to block=True, then this method will
681
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
682
+ connection is available within the time period.
683
+
684
+ :param bool preload_content:
685
+ If True, the response's body will be preloaded into memory.
686
+
687
+ :param bool decode_content:
688
+ If True, will attempt to decode the body based on the
689
+ 'content-encoding' header.
690
+
691
+ :param release_conn:
692
+ If False, then the urlopen call will not release the connection
693
+ back into the pool once a response is received (but will release if
694
+ you read the entire contents of the response such as when
695
+ `preload_content=True`). This is useful if you're not preloading
696
+ the response's content immediately. You will need to call
697
+ ``r.release_conn()`` on the response ``r`` to return the connection
698
+ back into the pool. If None, it takes the value of ``preload_content``
699
+ which defaults to ``True``.
700
+
701
+ :param bool chunked:
702
+ If True, urllib3 will send the body using chunked transfer
703
+ encoding. Otherwise, urllib3 will send the body using the standard
704
+ content-length form. Defaults to False.
705
+
706
+ :param int body_pos:
707
+ Position to seek to in file-like body in the event of a retry or
708
+ redirect. Typically this won't need to be set because urllib3 will
709
+ auto-populate the value when needed.
710
+ """
711
+ parsed_url = parse_url(url)
712
+ destination_scheme = parsed_url.scheme
713
+
714
+ if headers is None:
715
+ headers = self.headers
716
+
717
+ if not isinstance(retries, Retry):
718
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
719
+
720
+ if release_conn is None:
721
+ release_conn = preload_content
722
+
723
+ # Check host
724
+ if assert_same_host and not self.is_same_host(url):
725
+ raise HostChangedError(self, url, retries)
726
+
727
+ # Ensure that the URL we're connecting to is properly encoded
728
+ if url.startswith("/"):
729
+ url = to_str(_encode_target(url))
730
+ else:
731
+ url = to_str(parsed_url.url)
732
+
733
+ conn = None
734
+
735
+ # Track whether `conn` needs to be released before
736
+ # returning/raising/recursing. Update this variable if necessary, and
737
+ # leave `release_conn` constant throughout the function. That way, if
738
+ # the function recurses, the original value of `release_conn` will be
739
+ # passed down into the recursive call, and its value will be respected.
740
+ #
741
+ # See issue #651 [1] for details.
742
+ #
743
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
744
+ release_this_conn = release_conn
745
+
746
+ http_tunnel_required = connection_requires_http_tunnel(
747
+ self.proxy, self.proxy_config, destination_scheme
748
+ )
749
+
750
+ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
751
+ # have to copy the headers dict so we can safely change it without those
752
+ # changes being reflected in anyone else's copy.
753
+ if not http_tunnel_required:
754
+ headers = headers.copy() # type: ignore[attr-defined]
755
+ headers.update(self.proxy_headers) # type: ignore[union-attr]
756
+
757
+ # Must keep the exception bound to a separate variable or else Python 3
758
+ # complains about UnboundLocalError.
759
+ err = None
760
+
761
+ # Keep track of whether we cleanly exited the except block. This
762
+ # ensures we do proper cleanup in finally.
763
+ clean_exit = False
764
+
765
+ # Rewind body position, if needed. Record current position
766
+ # for future rewinds in the event of a redirect/retry.
767
+ body_pos = set_file_position(body, body_pos)
768
+
769
+ try:
770
+ # Request a connection from the queue.
771
+ timeout_obj = self._get_timeout(timeout)
772
+ conn = self._get_conn(timeout=pool_timeout)
773
+
774
+ conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
775
+
776
+ # Is this a closed/new connection that requires CONNECT tunnelling?
777
+ if self.proxy is not None and http_tunnel_required and conn.is_closed:
778
+ try:
779
+ self._prepare_proxy(conn)
780
+ except (BaseSSLError, OSError, SocketTimeout) as e:
781
+ self._raise_timeout(
782
+ err=e, url=self.proxy.url, timeout_value=conn.timeout
783
+ )
784
+ raise
785
+
786
+ # If we're going to release the connection in ``finally:``, then
787
+ # the response doesn't need to know about the connection. Otherwise
788
+ # it will also try to release it and we'll have a double-release
789
+ # mess.
790
+ response_conn = conn if not release_conn else None
791
+
792
+ # Make the request on the HTTPConnection object
793
+ response = self._make_request(
794
+ conn,
795
+ method,
796
+ url,
797
+ timeout=timeout_obj,
798
+ body=body,
799
+ headers=headers,
800
+ chunked=chunked,
801
+ retries=retries,
802
+ response_conn=response_conn,
803
+ preload_content=preload_content,
804
+ decode_content=decode_content,
805
+ **response_kw,
806
+ )
807
+
808
+ # Everything went great!
809
+ clean_exit = True
810
+
811
+ except EmptyPoolError:
812
+ # Didn't get a connection from the pool, no need to clean up
813
+ clean_exit = True
814
+ release_this_conn = False
815
+ raise
816
+
817
+ except (
818
+ TimeoutError,
819
+ HTTPException,
820
+ OSError,
821
+ ProtocolError,
822
+ BaseSSLError,
823
+ SSLError,
824
+ CertificateError,
825
+ ProxyError,
826
+ ) as e:
827
+ # Discard the connection for these exceptions. It will be
828
+ # replaced during the next _get_conn() call.
829
+ clean_exit = False
830
+ new_e: Exception = e
831
+ if isinstance(e, (BaseSSLError, CertificateError)):
832
+ new_e = SSLError(e)
833
+ if isinstance(
834
+ new_e,
835
+ (
836
+ OSError,
837
+ NewConnectionError,
838
+ TimeoutError,
839
+ SSLError,
840
+ HTTPException,
841
+ ),
842
+ ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
843
+ new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
844
+ elif isinstance(new_e, (OSError, HTTPException)):
845
+ new_e = ProtocolError("Connection aborted.", new_e)
846
+
847
+ retries = retries.increment(
848
+ method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
849
+ )
850
+ retries.sleep()
851
+
852
+ # Keep track of the error for the retry warning.
853
+ err = e
854
+
855
+ finally:
856
+ if not clean_exit:
857
+ # We hit some kind of exception, handled or otherwise. We need
858
+ # to throw the connection away unless explicitly told not to.
859
+ # Close the connection, set the variable to None, and make sure
860
+ # we put the None back in the pool to avoid leaking it.
861
+ if conn:
862
+ conn.close()
863
+ conn = None
864
+ release_this_conn = True
865
+
866
+ if release_this_conn:
867
+ # Put the connection back to be reused. If the connection is
868
+ # expired then it will be None, which will get replaced with a
869
+ # fresh connection during _get_conn.
870
+ self._put_conn(conn)
871
+
872
+ if not conn:
873
+ # Try again
874
+ log.warning(
875
+ "Retrying (%r) after connection broken by '%r': %s", retries, err, url
876
+ )
877
+ return self.urlopen(
878
+ method,
879
+ url,
880
+ body,
881
+ headers,
882
+ retries,
883
+ redirect,
884
+ assert_same_host,
885
+ timeout=timeout,
886
+ pool_timeout=pool_timeout,
887
+ release_conn=release_conn,
888
+ chunked=chunked,
889
+ body_pos=body_pos,
890
+ preload_content=preload_content,
891
+ decode_content=decode_content,
892
+ **response_kw,
893
+ )
894
+
895
+ # Handle redirect?
896
+ redirect_location = redirect and response.get_redirect_location()
897
+ if redirect_location:
898
+ if response.status == 303:
899
+ # Change the method according to RFC 9110, Section 15.4.4.
900
+ method = "GET"
901
+ # And drop the body so that nothing sensitive is re-sent.
902
+ body = None
903
+ headers = HTTPHeaderDict(headers)._prepare_for_method_change()
904
+
905
+ try:
906
+ retries = retries.increment(method, url, response=response, _pool=self)
907
+ except MaxRetryError:
908
+ if retries.raise_on_redirect:
909
+ response.drain_conn()
910
+ raise
911
+ return response
912
+
913
+ response.drain_conn()
914
+ retries.sleep_for_retry(response)
915
+ log.debug("Redirecting %s -> %s", url, redirect_location)
916
+ return self.urlopen(
917
+ method,
918
+ redirect_location,
919
+ body,
920
+ headers,
921
+ retries=retries,
922
+ redirect=redirect,
923
+ assert_same_host=assert_same_host,
924
+ timeout=timeout,
925
+ pool_timeout=pool_timeout,
926
+ release_conn=release_conn,
927
+ chunked=chunked,
928
+ body_pos=body_pos,
929
+ preload_content=preload_content,
930
+ decode_content=decode_content,
931
+ **response_kw,
932
+ )
933
+
934
+ # Check if we should retry the HTTP response.
935
+ has_retry_after = bool(response.headers.get("Retry-After"))
936
+ if retries.is_retry(method, response.status, has_retry_after):
937
+ try:
938
+ retries = retries.increment(method, url, response=response, _pool=self)
939
+ except MaxRetryError:
940
+ if retries.raise_on_status:
941
+ response.drain_conn()
942
+ raise
943
+ return response
944
+
945
+ response.drain_conn()
946
+ retries.sleep(response)
947
+ log.debug("Retry: %s", url)
948
+ return self.urlopen(
949
+ method,
950
+ url,
951
+ body,
952
+ headers,
953
+ retries=retries,
954
+ redirect=redirect,
955
+ assert_same_host=assert_same_host,
956
+ timeout=timeout,
957
+ pool_timeout=pool_timeout,
958
+ release_conn=release_conn,
959
+ chunked=chunked,
960
+ body_pos=body_pos,
961
+ preload_content=preload_content,
962
+ decode_content=decode_content,
963
+ **response_kw,
964
+ )
965
+
966
+ return response
967
+
968
+
969
+ class HTTPSConnectionPool(HTTPConnectionPool):
970
+ """
971
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
972
+
973
+ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
974
+ ``assert_hostname`` and ``host`` in this order to verify connections.
975
+ If ``assert_hostname`` is False, no verification is done.
976
+
977
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
978
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
979
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
980
+ the connection socket into an SSL socket.
981
+ """
982
+
983
+ scheme = "https"
984
+ ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection
985
+
986
+ def __init__(
987
+ self,
988
+ host: str,
989
+ port: int | None = None,
990
+ timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
991
+ maxsize: int = 1,
992
+ block: bool = False,
993
+ headers: typing.Mapping[str, str] | None = None,
994
+ retries: Retry | bool | int | None = None,
995
+ _proxy: Url | None = None,
996
+ _proxy_headers: typing.Mapping[str, str] | None = None,
997
+ key_file: str | None = None,
998
+ cert_file: str | None = None,
999
+ cert_reqs: int | str | None = None,
1000
+ key_password: str | None = None,
1001
+ ca_certs: str | None = None,
1002
+ ssl_version: int | str | None = None,
1003
+ ssl_minimum_version: ssl.TLSVersion | None = None,
1004
+ ssl_maximum_version: ssl.TLSVersion | None = None,
1005
+ assert_hostname: str | Literal[False] | None = None,
1006
+ assert_fingerprint: str | None = None,
1007
+ ca_cert_dir: str | None = None,
1008
+ **conn_kw: typing.Any,
1009
+ ) -> None:
1010
+ super().__init__(
1011
+ host,
1012
+ port,
1013
+ timeout,
1014
+ maxsize,
1015
+ block,
1016
+ headers,
1017
+ retries,
1018
+ _proxy,
1019
+ _proxy_headers,
1020
+ **conn_kw,
1021
+ )
1022
+
1023
+ self.key_file = key_file
1024
+ self.cert_file = cert_file
1025
+ self.cert_reqs = cert_reqs
1026
+ self.key_password = key_password
1027
+ self.ca_certs = ca_certs
1028
+ self.ca_cert_dir = ca_cert_dir
1029
+ self.ssl_version = ssl_version
1030
+ self.ssl_minimum_version = ssl_minimum_version
1031
+ self.ssl_maximum_version = ssl_maximum_version
1032
+ self.assert_hostname = assert_hostname
1033
+ self.assert_fingerprint = assert_fingerprint
1034
+
1035
+ def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override]
1036
+ """Establishes a tunnel connection through HTTP CONNECT."""
1037
+ if self.proxy and self.proxy.scheme == "https":
1038
+ tunnel_scheme = "https"
1039
+ else:
1040
+ tunnel_scheme = "http"
1041
+
1042
+ conn.set_tunnel(
1043
+ scheme=tunnel_scheme,
1044
+ host=self._tunnel_host,
1045
+ port=self.port,
1046
+ headers=self.proxy_headers,
1047
+ )
1048
+ conn.connect()
1049
+
1050
+ def _new_conn(self) -> BaseHTTPSConnection:
1051
+ """
1052
+ Return a fresh :class:`urllib3.connection.HTTPConnection`.
1053
+ """
1054
+ self.num_connections += 1
1055
+ log.debug(
1056
+ "Starting new HTTPS connection (%d): %s:%s",
1057
+ self.num_connections,
1058
+ self.host,
1059
+ self.port or "443",
1060
+ )
1061
+
1062
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap]
1063
+ raise ImportError(
1064
+ "Can't connect to HTTPS URL because the SSL module is not available."
1065
+ )
1066
+
1067
+ actual_host: str = self.host
1068
+ actual_port = self.port
1069
+ if self.proxy is not None and self.proxy.host is not None:
1070
+ actual_host = self.proxy.host
1071
+ actual_port = self.proxy.port
1072
+
1073
+ return self.ConnectionCls(
1074
+ host=actual_host,
1075
+ port=actual_port,
1076
+ timeout=self.timeout.connect_timeout,
1077
+ cert_file=self.cert_file,
1078
+ key_file=self.key_file,
1079
+ key_password=self.key_password,
1080
+ cert_reqs=self.cert_reqs,
1081
+ ca_certs=self.ca_certs,
1082
+ ca_cert_dir=self.ca_cert_dir,
1083
+ assert_hostname=self.assert_hostname,
1084
+ assert_fingerprint=self.assert_fingerprint,
1085
+ ssl_version=self.ssl_version,
1086
+ ssl_minimum_version=self.ssl_minimum_version,
1087
+ ssl_maximum_version=self.ssl_maximum_version,
1088
+ **self.conn_kw,
1089
+ )
1090
+
1091
+ def _validate_conn(self, conn: BaseHTTPConnection) -> None:
1092
+ """
1093
+ Called right before a request is made, after the socket is created.
1094
+ """
1095
+ super()._validate_conn(conn)
1096
+
1097
+ # Force connect early to allow us to validate the connection.
1098
+ if conn.is_closed:
1099
+ conn.connect()
1100
+
1101
+ # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
1102
+ if not conn.is_verified and not conn.proxy_is_verified:
1103
+ warnings.warn(
1104
+ (
1105
+ f"Unverified HTTPS request is being made to host '{conn.host}'. "
1106
+ "Adding certificate verification is strongly advised. See: "
1107
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
1108
+ "#tls-warnings"
1109
+ ),
1110
+ InsecureRequestWarning,
1111
+ )
1112
+
1113
+
1114
+ def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
1115
+ """
1116
+ Given a URL, return a :class:`.ConnectionPool` instance for its host.
1117
+
1118
+ This is a shortcut for not having to parse out the scheme, host, and port
1119
+ of the URL before creating a :class:`.ConnectionPool` instance.
1120
+
1121
+ :param url:
1122
+ Absolute URL string that must include the scheme. Port is optional.
1123
+
1124
+ :param \\**kw:
1125
+ Passes additional parameters to the constructor of the appropriate
1126
+ :class:`.ConnectionPool`. Useful for specifying things like
1127
+ timeout, maxsize, headers, etc.
1128
+
1129
+ Example::
1130
+
1131
+ >>> conn = connection_from_url('http://google.com/')
1132
+ >>> r = conn.request('GET', '/')
1133
+ """
1134
+ scheme, _, host, port, *_ = parse_url(url)
1135
+ scheme = scheme or "http"
1136
+ port = port or port_by_scheme.get(scheme, 80)
1137
+ if scheme == "https":
1138
+ return HTTPSConnectionPool(host, port=port, **kw) # type: ignore[arg-type]
1139
+ else:
1140
+ return HTTPConnectionPool(host, port=port, **kw) # type: ignore[arg-type]
1141
+
1142
+
1143
+ @typing.overload
1144
+ def _normalize_host(host: None, scheme: str | None) -> None:
1145
+ ...
1146
+
1147
+
1148
+ @typing.overload
1149
+ def _normalize_host(host: str, scheme: str | None) -> str:
1150
+ ...
1151
+
1152
+
1153
+ def _normalize_host(host: str | None, scheme: str | None) -> str | None:
1154
+ """
1155
+ Normalize hosts for comparisons and use with sockets.
1156
+ """
1157
+
1158
+ host = normalize_host(host, scheme)
1159
+
1160
+ # httplib doesn't like it when we include brackets in IPv6 addresses
1161
+ # Specifically, if we include brackets but also pass the port then
1162
+ # httplib crazily doubles up the square brackets on the Host header.
1163
+ # Instead, we need to make sure we never pass ``None`` as the port.
1164
+ # However, for backward compatibility reasons we can't actually
1165
+ # *assert* that. See http://bugs.python.org/issue28539
1166
+ if host and host.startswith("[") and host.endswith("]"):
1167
+ host = host[1:-1]
1168
+ return host
1169
+
1170
+
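+ # Illustrative sketch (not part of upstream urllib3): the bracket stripping
+ # above means an IPv6 literal taken from a URL and the bare address end up
+ # as the same host string before being handed to http.client, e.g.
+ #
+ #     _normalize_host("[2001:db8::1]", scheme="http")  # -> "2001:db8::1"
+ #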
1171
+ def _url_from_pool(
1172
+ pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
1173
+ ) -> str:
1174
+ """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
1175
+ return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url
1176
+
1177
+
1178
+ def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None:
1179
+ """Drains a queue of connections and closes each one."""
1180
+ try:
1181
+ while True:
1182
+ conn = pool.get(block=False)
1183
+ if conn:
1184
+ conn.close()
1185
+ except queue.Empty:
1186
+ pass # Done.
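
A minimal usage sketch of the pool API defined above (the host is a
placeholder; everything else uses only what the module itself exports):

    from urllib3.connectionpool import connection_from_url

    pool = connection_from_url("http://example.com/", maxsize=2, block=True)
    try:
        # preload_content=True (the default) reads the body eagerly and
        # returns the connection to the pool automatically.
        response = pool.urlopen("GET", "/")
        print(response.status, len(response.data))
    finally:
        pool.close()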
venv/lib/python3.10/site-packages/urllib3/exceptions.py ADDED
@@ -0,0 +1,321 @@
1
+ from __future__ import annotations
2
+
3
+ import socket
4
+ import typing
5
+ import warnings
6
+ from email.errors import MessageDefect
7
+ from http.client import IncompleteRead as httplib_IncompleteRead
8
+
9
+ if typing.TYPE_CHECKING:
10
+ from .connection import HTTPConnection
11
+ from .connectionpool import ConnectionPool
12
+ from .response import HTTPResponse
13
+ from .util.retry import Retry
14
+
15
+ # Base Exceptions
16
+
17
+
18
+ class HTTPError(Exception):
19
+ """Base exception used by this module."""
20
+
21
+
22
+ class HTTPWarning(Warning):
23
+ """Base warning used by this module."""
24
+
25
+
26
+ _TYPE_REDUCE_RESULT = typing.Tuple[
27
+ typing.Callable[..., object], typing.Tuple[object, ...]
28
+ ]
29
+
30
+
31
+ class PoolError(HTTPError):
32
+ """Base exception for errors caused within a pool."""
33
+
34
+ def __init__(self, pool: ConnectionPool, message: str) -> None:
35
+ self.pool = pool
36
+ super().__init__(f"{pool}: {message}")
37
+
38
+ def __reduce__(self) -> _TYPE_REDUCE_RESULT:
39
+ # For pickling purposes.
40
+ return self.__class__, (None, None)
41
+
42
+
43
+ class RequestError(PoolError):
44
+ """Base exception for PoolErrors that have associated URLs."""
45
+
46
+ def __init__(self, pool: ConnectionPool, url: str, message: str) -> None:
47
+ self.url = url
48
+ super().__init__(pool, message)
49
+
50
+ def __reduce__(self) -> _TYPE_REDUCE_RESULT:
51
+ # For pickling purposes.
52
+ return self.__class__, (None, self.url, None)
53
+
54
+
55
+ class SSLError(HTTPError):
56
+ """Raised when SSL certificate fails in an HTTPS connection."""
57
+
58
+
59
+ class ProxyError(HTTPError):
60
+ """Raised when the connection to a proxy fails."""
61
+
62
+ # The original error is also available as __cause__.
63
+ original_error: Exception
64
+
65
+ def __init__(self, message: str, error: Exception) -> None:
66
+ super().__init__(message, error)
67
+ self.original_error = error
68
+
69
+
70
+ class DecodeError(HTTPError):
71
+ """Raised when automatic decoding based on Content-Type fails."""
72
+
73
+
74
+ class ProtocolError(HTTPError):
75
+ """Raised when something unexpected happens mid-request/response."""
76
+
77
+
78
+ #: Renamed to ProtocolError but aliased for backwards compatibility.
79
+ ConnectionError = ProtocolError
80
+
81
+
82
+ # Leaf Exceptions
83
+
84
+
85
+ class MaxRetryError(RequestError):
86
+ """Raised when the maximum number of retries is exceeded.
87
+
88
+ :param pool: The connection pool
89
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
90
+ :param str url: The requested Url
91
+ :param reason: The underlying error
92
+ :type reason: :class:`Exception`
93
+
94
+ """
95
+
96
+ def __init__(
97
+ self, pool: ConnectionPool, url: str, reason: Exception | None = None
98
+ ) -> None:
99
+ self.reason = reason
100
+
101
+ message = f"Max retries exceeded with url: {url} (Caused by {reason!r})"
102
+
103
+ super().__init__(pool, url, message)
104
+
105
+
106
+ class HostChangedError(RequestError):
107
+ """Raised when an existing pool gets a request for a foreign host."""
108
+
109
+ def __init__(
110
+ self, pool: ConnectionPool, url: str, retries: Retry | int = 3
111
+ ) -> None:
112
+ message = f"Tried to open a foreign host with url: {url}"
113
+ super().__init__(pool, url, message)
114
+ self.retries = retries
115
+
116
+
117
+ class TimeoutStateError(HTTPError):
118
+ """Raised when passing an invalid state to a timeout"""
119
+
120
+
121
+ class TimeoutError(HTTPError):
122
+ """Raised when a socket timeout error occurs.
123
+
124
+ Catching this error will catch both :exc:`ReadTimeoutErrors
125
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
126
+ """
127
+
128
+
129
+ class ReadTimeoutError(TimeoutError, RequestError):
130
+ """Raised when a socket timeout occurs while receiving data from a server"""
131
+
132
+
133
+ # This timeout error does not have a URL attached and needs to inherit from the
134
+ # base HTTPError
135
+ class ConnectTimeoutError(TimeoutError):
136
+ """Raised when a socket timeout occurs while connecting to a server"""
137
+
138
+
139
+ class NewConnectionError(ConnectTimeoutError, HTTPError):
140
+ """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
141
+
142
+ def __init__(self, conn: HTTPConnection, message: str) -> None:
143
+ self.conn = conn
144
+ super().__init__(f"{conn}: {message}")
145
+
146
+ @property
147
+ def pool(self) -> HTTPConnection:
148
+ warnings.warn(
149
+ "The 'pool' property is deprecated and will be removed "
150
+ "in urllib3 v2.1.0. Use 'conn' instead.",
151
+ DeprecationWarning,
152
+ stacklevel=2,
153
+ )
154
+
155
+ return self.conn
156
+
157
+
158
+ class NameResolutionError(NewConnectionError):
159
+ """Raised when host name resolution fails."""
160
+
161
+ def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror):
162
+ message = f"Failed to resolve '{host}' ({reason})"
163
+ super().__init__(conn, message)
164
+
165
+
166
+ class EmptyPoolError(PoolError):
167
+ """Raised when a pool runs out of connections and no more are allowed."""
168
+
169
+
170
+ class FullPoolError(PoolError):
171
+ """Raised when we try to add a connection to a full pool in blocking mode."""
172
+
173
+
174
+ class ClosedPoolError(PoolError):
175
+ """Raised when a request enters a pool after the pool has been closed."""
176
+
177
+
178
+ class LocationValueError(ValueError, HTTPError):
179
+ """Raised when there is something wrong with a given URL input."""
180
+
181
+
182
+ class LocationParseError(LocationValueError):
183
+ """Raised when get_host or similar fails to parse the URL input."""
184
+
185
+ def __init__(self, location: str) -> None:
186
+ message = f"Failed to parse: {location}"
187
+ super().__init__(message)
188
+
189
+ self.location = location
190
+
191
+
192
+ class URLSchemeUnknown(LocationValueError):
193
+ """Raised when a URL input has an unsupported scheme."""
194
+
195
+ def __init__(self, scheme: str):
196
+ message = f"Not supported URL scheme {scheme}"
197
+ super().__init__(message)
198
+
199
+ self.scheme = scheme
200
+
201
+
202
+ class ResponseError(HTTPError):
203
+ """Used as a container for an error reason supplied in a MaxRetryError."""
204
+
205
+ GENERIC_ERROR = "too many error responses"
206
+ SPECIFIC_ERROR = "too many {status_code} error responses"
207
+
208
+
209
+ class SecurityWarning(HTTPWarning):
210
+ """Warned when performing security reducing actions"""
211
+
212
+
213
+ class InsecureRequestWarning(SecurityWarning):
214
+ """Warned when making an unverified HTTPS request."""
215
+
216
+
217
+ class NotOpenSSLWarning(SecurityWarning):
218
+ """Warned when using unsupported SSL library"""
219
+
220
+
221
+ class SystemTimeWarning(SecurityWarning):
222
+ """Warned when system time is suspected to be wrong"""
223
+
224
+
225
+ class InsecurePlatformWarning(SecurityWarning):
226
+ """Warned when certain TLS/SSL configuration is not available on a platform."""
227
+
228
+
229
+ class DependencyWarning(HTTPWarning):
230
+ """
231
+ Warned when an attempt is made to import a module with missing optional
232
+ dependencies.
233
+ """
234
+
235
+
236
+ class ResponseNotChunked(ProtocolError, ValueError):
237
+ """Response needs to be chunked in order to read it as chunks."""
238
+
239
+
240
+ class BodyNotHttplibCompatible(HTTPError):
241
+ """
242
+ Body should be :class:`http.client.HTTPResponse` like
243
+ (have an fp attribute which returns raw chunks) for read_chunked().
244
+ """
245
+
246
+
247
+ class IncompleteRead(HTTPError, httplib_IncompleteRead):
248
+ """
249
+ Response length doesn't match expected Content-Length
250
+
251
+ Subclass of :class:`http.client.IncompleteRead` to allow int value
252
+ for ``partial`` to avoid creating large objects on streamed reads.
253
+ """
254
+
255
+ partial: int # type: ignore[assignment]
256
+ expected: int
257
+
258
+ def __init__(self, partial: int, expected: int) -> None:
259
+ self.partial = partial
260
+ self.expected = expected
261
+
262
+ def __repr__(self) -> str:
263
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
264
+ self.partial,
265
+ self.expected,
266
+ )
267
+
268
+
269
+ class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
270
+ """Invalid chunk length in a chunked response."""
271
+
272
+ def __init__(self, response: HTTPResponse, length: bytes) -> None:
273
+ self.partial: int = response.tell() # type: ignore[assignment]
274
+ self.expected: int | None = response.length_remaining
275
+ self.response = response
276
+ self.length = length
277
+
278
+ def __repr__(self) -> str:
279
+ return "InvalidChunkLength(got length %r, %i bytes read)" % (
280
+ self.length,
281
+ self.partial,
282
+ )
283
+
284
+
285
+ class InvalidHeader(HTTPError):
286
+ """The header provided was somehow invalid."""
287
+
288
+
289
+ class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
290
+ """ProxyManager does not support the supplied scheme"""
291
+
292
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
293
+
294
+ def __init__(self, scheme: str | None) -> None:
295
+ # 'localhost' is here because our URL parser parses
296
+ # localhost:8080 -> scheme=localhost, remove if we fix this.
297
+ if scheme == "localhost":
298
+ scheme = None
299
+ if scheme is None:
300
+ message = "Proxy URL had no scheme, should start with http:// or https://"
301
+ else:
302
+ message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://"
303
+ super().__init__(message)
304
+
305
+
306
+ class ProxySchemeUnsupported(ValueError):
307
+ """Fetching HTTPS resources through HTTPS proxies is unsupported"""
308
+
309
+
310
+ class HeaderParsingError(HTTPError):
311
+ """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
312
+
313
+ def __init__(
314
+ self, defects: list[MessageDefect], unparsed_data: bytes | str | None
315
+ ) -> None:
316
+ message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}"
317
+ super().__init__(message)
318
+
319
+
320
+ class UnrewindableBodyError(HTTPError):
321
+ """urllib3 encountered an error when trying to rewind a body"""
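
Most of the exceptions above derive from HTTPError (and the pool-related
ones from PoolError/RequestError), so callers can catch at whichever level
of the hierarchy suits them. A hedged sketch of the common pattern, using
example.invalid as a deliberately unresolvable placeholder host:

    import urllib3
    from urllib3.exceptions import HTTPError, MaxRetryError

    pool = urllib3.HTTPConnectionPool("example.invalid", retries=1)
    try:
        pool.urlopen("GET", "/")
    except MaxRetryError as exc:
        # .reason carries the underlying error, e.g. a NameResolutionError.
        print("gave up on", exc.url, "because of", exc.reason)
    except HTTPError as exc:
        print("other urllib3 error:", exc)
    finally:
        pool.close()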
venv/lib/python3.10/site-packages/urllib3/fields.py ADDED
@@ -0,0 +1,341 @@
1
+ from __future__ import annotations
2
+
3
+ import email.utils
4
+ import mimetypes
5
+ import typing
6
+
7
+ _TYPE_FIELD_VALUE = typing.Union[str, bytes]
8
+ _TYPE_FIELD_VALUE_TUPLE = typing.Union[
9
+ _TYPE_FIELD_VALUE,
10
+ typing.Tuple[str, _TYPE_FIELD_VALUE],
11
+ typing.Tuple[str, _TYPE_FIELD_VALUE, str],
12
+ ]
13
+
14
+
15
+ def guess_content_type(
16
+ filename: str | None, default: str = "application/octet-stream"
17
+ ) -> str:
18
+ """
19
+ Guess the "Content-Type" of a file.
20
+
21
+ :param filename:
22
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
23
+ :param default:
24
+ If no "Content-Type" can be guessed, default to `default`.
25
+ """
26
+ if filename:
27
+ return mimetypes.guess_type(filename)[0] or default
28
+ return default
29
+
30
+
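+ # Illustrative sketch (not part of upstream urllib3): typical results of the
+ # helper above, relying only on the standard mimetypes table.
+ #
+ #     guess_content_type("report.pdf")       # -> "application/pdf"
+ #     guess_content_type("data.unknownext")  # -> "application/octet-stream"
+ #     guess_content_type(None)               # -> "application/octet-stream"
+ #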
31
+ def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str:
32
+ """
33
+ Helper function to format and quote a single header parameter using the
34
+ strategy defined in RFC 2231.
35
+
36
+ Particularly useful for header parameters which might contain
37
+ non-ASCII values, like file names. This follows
38
+ `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
39
+
40
+ :param name:
41
+ The name of the parameter, a string expected to be ASCII only.
42
+ :param value:
43
+ The value of the parameter, provided as ``bytes`` or ``str``.
44
+ :returns:
45
+ An RFC-2231-formatted unicode string.
46
+
47
+ .. deprecated:: 2.0.0
48
+ Will be removed in urllib3 v2.1.0. This is not valid for
49
+ ``multipart/form-data`` header parameters.
50
+ """
51
+ import warnings
52
+
53
+ warnings.warn(
54
+ "'format_header_param_rfc2231' is deprecated and will be "
55
+ "removed in urllib3 v2.1.0. This is not valid for "
56
+ "multipart/form-data header parameters.",
57
+ DeprecationWarning,
58
+ stacklevel=2,
59
+ )
60
+
61
+ if isinstance(value, bytes):
62
+ value = value.decode("utf-8")
63
+
64
+ if not any(ch in value for ch in '"\\\r\n'):
65
+ result = f'{name}="{value}"'
66
+ try:
67
+ result.encode("ascii")
68
+ except (UnicodeEncodeError, UnicodeDecodeError):
69
+ pass
70
+ else:
71
+ return result
72
+
73
+ value = email.utils.encode_rfc2231(value, "utf-8")
74
+ value = f"{name}*={value}"
75
+
76
+ return value
77
+
78
+
79
+ def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
80
+ """
81
+ Format and quote a single multipart header parameter.
82
+
83
+ This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching
84
+ the behavior of current browser and curl versions. Values are
85
+ assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are
86
+ percent encoded.
87
+
88
+ .. _WHATWG HTML Standard:
89
+ https://html.spec.whatwg.org/multipage/
90
+ form-control-infrastructure.html#multipart-form-data
91
+
92
+ :param name:
93
+ The name of the parameter, an ASCII-only ``str``.
94
+ :param value:
95
+ The value of the parameter, a ``str`` or UTF-8 encoded
96
+ ``bytes``.
97
+ :returns:
98
+ A string ``name="value"`` with the escaped value.
99
+
100
+ .. versionchanged:: 2.0.0
101
+ Matches the WHATWG HTML Standard as of 2021/06/10. Control
102
+ characters are no longer percent encoded.
103
+
104
+ .. versionchanged:: 2.0.0
105
+ Renamed from ``format_header_param_html5`` and
106
+ ``format_header_param``. The old names will be removed in
107
+ urllib3 v2.1.0.
108
+ """
109
+ if isinstance(value, bytes):
110
+ value = value.decode("utf-8")
111
+
112
+ # percent encode \n \r "
113
+ value = value.translate({10: "%0A", 13: "%0D", 34: "%22"})
114
+ return f'{name}="{value}"'
115
+
116
+
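+ # Illustrative sketch (not part of upstream urllib3): only the three
+ # characters named above are percent encoded; everything else, including
+ # other control characters and non-ASCII text, passes through untouched.
+ #
+ #     format_multipart_header_param("filename", 'na"me\r\n.txt')
+ #     # -> 'filename="na%22me%0D%0A.txt"'
+ #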
117
+ def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str:
118
+ """
119
+ .. deprecated:: 2.0.0
120
+ Renamed to :func:`format_multipart_header_param`. Will be
121
+ removed in urllib3 v2.1.0.
122
+ """
123
+ import warnings
124
+
125
+ warnings.warn(
126
+ "'format_header_param_html5' has been renamed to "
127
+ "'format_multipart_header_param'. The old name will be "
128
+ "removed in urllib3 v2.1.0.",
129
+ DeprecationWarning,
130
+ stacklevel=2,
131
+ )
132
+ return format_multipart_header_param(name, value)
133
+
134
+
135
+ def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
136
+ """
137
+ .. deprecated:: 2.0.0
138
+ Renamed to :func:`format_multipart_header_param`. Will be
139
+ removed in urllib3 v2.1.0.
140
+ """
141
+ import warnings
142
+
143
+ warnings.warn(
144
+ "'format_header_param' has been renamed to "
145
+ "'format_multipart_header_param'. The old name will be "
146
+ "removed in urllib3 v2.1.0.",
147
+ DeprecationWarning,
148
+ stacklevel=2,
149
+ )
150
+ return format_multipart_header_param(name, value)
151
+
152
+
153
+ class RequestField:
154
+ """
155
+ A data container for request body parameters.
156
+
157
+ :param name:
158
+ The name of this request field. Must be unicode.
159
+ :param data:
160
+ The data/value body.
161
+ :param filename:
162
+ An optional filename of the request field. Must be unicode.
163
+ :param headers:
164
+ An optional dict-like object of headers to initially use for the field.
165
+
166
+ .. versionchanged:: 2.0.0
167
+ The ``header_formatter`` parameter is deprecated and will
168
+ be removed in urllib3 v2.1.0.
169
+ """
170
+
171
+ def __init__(
172
+ self,
173
+ name: str,
174
+ data: _TYPE_FIELD_VALUE,
175
+ filename: str | None = None,
176
+ headers: typing.Mapping[str, str] | None = None,
177
+ header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None,
178
+ ):
179
+ self._name = name
180
+ self._filename = filename
181
+ self.data = data
182
+ self.headers: dict[str, str | None] = {}
183
+ if headers:
184
+ self.headers = dict(headers)
185
+
186
+ if header_formatter is not None:
187
+ import warnings
188
+
189
+ warnings.warn(
190
+ "The 'header_formatter' parameter is deprecated and "
191
+ "will be removed in urllib3 v2.1.0.",
192
+ DeprecationWarning,
193
+ stacklevel=2,
194
+ )
195
+ self.header_formatter = header_formatter
196
+ else:
197
+ self.header_formatter = format_multipart_header_param
198
+
199
+ @classmethod
200
+ def from_tuples(
201
+ cls,
202
+ fieldname: str,
203
+ value: _TYPE_FIELD_VALUE_TUPLE,
204
+ header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None,
205
+ ) -> RequestField:
206
+ """
207
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
208
+
209
+ Supports constructing :class:`~urllib3.fields.RequestField` from
210
+ parameters of key/value strings AND key/filetuple pairs. A filetuple is a
211
+ (filename, data, MIME type) tuple where the MIME type is optional.
212
+ For example::
213
+
214
+ 'foo': 'bar',
215
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
216
+ 'realfile': ('barfile.txt', open('realfile').read()),
217
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
218
+ 'nonamefile': 'contents of nonamefile field',
219
+
220
+ Field names and filenames must be unicode.
221
+ """
222
+ filename: str | None
223
+ content_type: str | None
224
+ data: _TYPE_FIELD_VALUE
225
+
226
+ if isinstance(value, tuple):
227
+ if len(value) == 3:
228
+ filename, data, content_type = value
229
+ else:
230
+ filename, data = value
231
+ content_type = guess_content_type(filename)
232
+ else:
233
+ filename = None
234
+ content_type = None
235
+ data = value
236
+
237
+ request_param = cls(
238
+ fieldname, data, filename=filename, header_formatter=header_formatter
239
+ )
240
+ request_param.make_multipart(content_type=content_type)
241
+
242
+ return request_param
243
+
244
+ def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str:
245
+ """
246
+ Override this method to change how each multipart header
247
+ parameter is formatted. By default, this calls
248
+ :func:`format_multipart_header_param`.
249
+
250
+ :param name:
251
+ The name of the parameter, an ASCII-only ``str``.
252
+ :param value:
253
+ The value of the parameter, a ``str`` or UTF-8 encoded
254
+ ``bytes``.
255
+
256
+ :meta public:
257
+ """
258
+ return self.header_formatter(name, value)
259
+
260
+ def _render_parts(
261
+ self,
262
+ header_parts: (
263
+ dict[str, _TYPE_FIELD_VALUE | None]
264
+ | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]]
265
+ ),
266
+ ) -> str:
267
+ """
268
+ Helper function to format and quote a single header.
269
+
270
+ Useful for single headers that are composed of multiple items. E.g.,
271
+ 'Content-Disposition' fields.
272
+
273
+ :param header_parts:
274
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
275
+ as `k1="v1"; k2="v2"; ...`.
276
+ """
277
+ iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]]
278
+
279
+ parts = []
280
+ if isinstance(header_parts, dict):
281
+ iterable = header_parts.items()
282
+ else:
283
+ iterable = header_parts
284
+
285
+ for name, value in iterable:
286
+ if value is not None:
287
+ parts.append(self._render_part(name, value))
288
+
289
+ return "; ".join(parts)
290
+
291
+ def render_headers(self) -> str:
292
+ """
293
+ Renders the headers for this request field.
294
+ """
295
+ lines = []
296
+
297
+ sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
298
+ for sort_key in sort_keys:
299
+ if self.headers.get(sort_key, False):
300
+ lines.append(f"{sort_key}: {self.headers[sort_key]}")
301
+
302
+ for header_name, header_value in self.headers.items():
303
+ if header_name not in sort_keys:
304
+ if header_value:
305
+ lines.append(f"{header_name}: {header_value}")
306
+
307
+ lines.append("\r\n")
308
+ return "\r\n".join(lines)
309
+
310
+ def make_multipart(
311
+ self,
312
+ content_disposition: str | None = None,
313
+ content_type: str | None = None,
314
+ content_location: str | None = None,
315
+ ) -> None:
316
+ """
317
+ Makes this request field into a multipart request field.
318
+
319
+ This method sets the "Content-Disposition", "Content-Type" and
320
+ "Content-Location" headers on the request field.
321
+
322
+ :param content_disposition:
323
+ The 'Content-Disposition' of the request body. Defaults to 'form-data'
324
+ :param content_type:
325
+ The 'Content-Type' of the request body.
326
+ :param content_location:
327
+ The 'Content-Location' of the request body.
328
+
329
+ """
330
+ content_disposition = (content_disposition or "form-data") + "; ".join(
331
+ [
332
+ "",
333
+ self._render_parts(
334
+ (("name", self._name), ("filename", self._filename))
335
+ ),
336
+ ]
337
+ )
338
+
339
+ self.headers["Content-Disposition"] = content_disposition
340
+ self.headers["Content-Type"] = content_type
341
+ self.headers["Content-Location"] = content_location
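
Putting the pieces above together, a RequestField is usually built from the
tuple form and then rendered into multipart headers. A minimal sketch (the
field name, file name, and contents are placeholders):

    from urllib3.fields import RequestField

    field = RequestField.from_tuples(
        "attachment", ("notes.txt", b"hello world", "text/plain")
    )
    print(field.render_headers())
    # Content-Disposition: form-data; name="attachment"; filename="notes.txt"
    # Content-Type: text/plain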
venv/lib/python3.10/site-packages/urllib3/filepost.py ADDED
@@ -0,0 +1,89 @@
1
+ from __future__ import annotations
2
+
3
+ import binascii
4
+ import codecs
5
+ import os
6
+ import typing
7
+ from io import BytesIO
8
+
9
+ from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField
10
+
11
+ writer = codecs.lookup("utf-8")[3]
12
+
13
+ _TYPE_FIELDS_SEQUENCE = typing.Sequence[
14
+ typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField]
15
+ ]
16
+ _TYPE_FIELDS = typing.Union[
17
+ _TYPE_FIELDS_SEQUENCE,
18
+ typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE],
19
+ ]
20
+
21
+
22
+ def choose_boundary() -> str:
23
+ """
24
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
25
+ """
26
+ return binascii.hexlify(os.urandom(16)).decode()
27
+
28
+
29
+ def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]:
30
+ """
31
+ Iterate over fields.
32
+
33
+ Supports list of (k, v) tuples and dicts, and lists of
34
+ :class:`~urllib3.fields.RequestField`.
35
+
36
+ """
37
+ iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]]
38
+
39
+ if isinstance(fields, typing.Mapping):
40
+ iterable = fields.items()
41
+ else:
42
+ iterable = fields
43
+
44
+ for field in iterable:
45
+ if isinstance(field, RequestField):
46
+ yield field
47
+ else:
48
+ yield RequestField.from_tuples(*field)
49
+
50
+
51
+ def encode_multipart_formdata(
52
+ fields: _TYPE_FIELDS, boundary: str | None = None
53
+ ) -> tuple[bytes, str]:
54
+ """
55
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
56
+
57
+ :param fields:
58
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
59
+ Values are processed by :func:`urllib3.fields.RequestField.from_tuples`.
60
+
61
+ :param boundary:
62
+ If not specified, then a random boundary will be generated using
63
+ :func:`urllib3.filepost.choose_boundary`.
64
+ """
65
+ body = BytesIO()
66
+ if boundary is None:
67
+ boundary = choose_boundary()
68
+
69
+ for field in iter_field_objects(fields):
70
+ body.write(f"--{boundary}\r\n".encode("latin-1"))
71
+
72
+ writer(body).write(field.render_headers())
73
+ data = field.data
74
+
75
+ if isinstance(data, int):
76
+ data = str(data) # Backwards compatibility
77
+
78
+ if isinstance(data, str):
79
+ writer(body).write(data)
80
+ else:
81
+ body.write(data)
82
+
83
+ body.write(b"\r\n")
84
+
85
+ body.write(f"--{boundary}--\r\n".encode("latin-1"))
86
+
87
+ content_type = f"multipart/form-data; boundary={boundary}"
88
+
89
+ return body.getvalue(), content_type
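
A short sketch of how the helpers above combine (the boundary is pinned only
to make the output predictable; by default choose_boundary() picks a random
one):

    from urllib3.filepost import encode_multipart_formdata

    body, content_type = encode_multipart_formdata(
        {
            "comment": "hello",
            "upload": ("notes.txt", b"file contents", "text/plain"),
        },
        boundary="xxBOUNDARYxx",
    )
    print(content_type)  # multipart/form-data; boundary=xxBOUNDARYxx
    assert b"--xxBOUNDARYxx--\r\n" in body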
venv/lib/python3.10/site-packages/urllib3/http2.py ADDED
@@ -0,0 +1,229 @@
1
+ from __future__ import annotations
2
+
3
+ import threading
4
+ import types
5
+ import typing
6
+
7
+ import h2.config # type: ignore[import-untyped]
8
+ import h2.connection # type: ignore[import-untyped]
9
+ import h2.events # type: ignore[import-untyped]
10
+
11
+ import urllib3.connection
12
+ import urllib3.util.ssl_
13
+ from urllib3.response import BaseHTTPResponse
14
+
15
+ from ._collections import HTTPHeaderDict
16
+ from .connection import HTTPSConnection
17
+ from .connectionpool import HTTPSConnectionPool
18
+
19
+ orig_HTTPSConnection = HTTPSConnection
20
+
21
+ T = typing.TypeVar("T")
22
+
23
+
24
+ class _LockedObject(typing.Generic[T]):
25
+ """
26
+ A wrapper class that hides a specific object behind a lock.
27
+
28
+ The goal here is to provide a simple way to protect access to an object
29
+ that cannot safely be simultaneously accessed from multiple threads. The
30
+ intended use of this class is simple: take hold of it with a context
31
+ manager, which returns the protected object.
32
+ """
33
+
34
+ def __init__(self, obj: T):
35
+ self.lock = threading.RLock()
36
+ self._obj = obj
37
+
38
+ def __enter__(self) -> T:
39
+ self.lock.acquire()
40
+ return self._obj
41
+
42
+ def __exit__(
43
+ self,
44
+ exc_type: type[BaseException] | None,
45
+ exc_val: BaseException | None,
46
+ exc_tb: types.TracebackType | None,
47
+ ) -> None:
48
+ self.lock.release()
49
+
50
+
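+ # Illustrative sketch (not part of upstream urllib3): the wrapper is used as
+ # a context manager, which acquires the re-entrant lock and hands back the
+ # protected object for the duration of the block.
+ #
+ #     guarded = _LockedObject({"requests": 0})
+ #     with guarded as state:
+ #         state["requests"] += 1
+ #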
51
+ class HTTP2Connection(HTTPSConnection):
52
+ def __init__(
53
+ self, host: str, port: int | None = None, **kwargs: typing.Any
54
+ ) -> None:
55
+ self._h2_conn = self._new_h2_conn()
56
+ self._h2_stream: int | None = None
57
+ self._h2_headers: list[tuple[bytes, bytes]] = []
58
+
59
+ if "proxy" in kwargs or "proxy_config" in kwargs: # Defensive:
60
+ raise NotImplementedError("Proxies aren't supported with HTTP/2")
61
+
62
+ super().__init__(host, port, **kwargs)
63
+
64
+ def _new_h2_conn(self) -> _LockedObject[h2.connection.H2Connection]:
65
+ config = h2.config.H2Configuration(client_side=True)
66
+ return _LockedObject(h2.connection.H2Connection(config=config))
67
+
68
+ def connect(self) -> None:
69
+ super().connect()
70
+
71
+ with self._h2_conn as h2_conn:
72
+ h2_conn.initiate_connection()
73
+ self.sock.sendall(h2_conn.data_to_send())
74
+
75
+ def putrequest(
76
+ self,
77
+ method: str,
78
+ url: str,
79
+ skip_host: bool = False,
80
+ skip_accept_encoding: bool = False,
81
+ ) -> None:
82
+ with self._h2_conn as h2_conn:
83
+ self._request_url = url
84
+ self._h2_stream = h2_conn.get_next_available_stream_id()
85
+
86
+ if ":" in self.host:
87
+ authority = f"[{self.host}]:{self.port or 443}"
88
+ else:
89
+ authority = f"{self.host}:{self.port or 443}"
90
+
91
+ self._h2_headers.extend(
92
+ (
93
+ (b":scheme", b"https"),
94
+ (b":method", method.encode()),
95
+ (b":authority", authority.encode()),
96
+ (b":path", url.encode()),
97
+ )
98
+ )
99
+
100
+ def putheader(self, header: str, *values: str) -> None: # type: ignore[override]
101
+ for value in values:
102
+ self._h2_headers.append(
103
+ (header.encode("utf-8").lower(), value.encode("utf-8"))
104
+ )
105
+
106
+ def endheaders(self) -> None: # type: ignore[override]
107
+ with self._h2_conn as h2_conn:
108
+ h2_conn.send_headers(
109
+ stream_id=self._h2_stream,
110
+ headers=self._h2_headers,
111
+ end_stream=True,
112
+ )
113
+ if data_to_send := h2_conn.data_to_send():
114
+ self.sock.sendall(data_to_send)
115
+
116
+ def send(self, data: bytes) -> None: # type: ignore[override] # Defensive:
117
+ if not data:
118
+ return
119
+ raise NotImplementedError("Sending data isn't supported yet")
120
+
121
+ def getresponse( # type: ignore[override]
122
+ self,
123
+ ) -> HTTP2Response:
124
+ status = None
125
+ data = bytearray()
126
+ with self._h2_conn as h2_conn:
127
+ end_stream = False
128
+ while not end_stream:
129
+ # TODO: Arbitrary read value.
130
+ if received_data := self.sock.recv(65535):
131
+ events = h2_conn.receive_data(received_data)
132
+ for event in events:
133
+ if isinstance(event, h2.events.ResponseReceived):
134
+ headers = HTTPHeaderDict()
135
+ for header, value in event.headers:
136
+ if header == b":status":
137
+ status = int(value.decode())
138
+ else:
139
+ headers.add(
140
+ header.decode("ascii"), value.decode("ascii")
141
+ )
142
+
143
+ elif isinstance(event, h2.events.DataReceived):
144
+ data += event.data
145
+ h2_conn.acknowledge_received_data(
146
+ event.flow_controlled_length, event.stream_id
147
+ )
148
+
149
+ elif isinstance(event, h2.events.StreamEnded):
150
+ end_stream = True
151
+
152
+ if data_to_send := h2_conn.data_to_send():
153
+ self.sock.sendall(data_to_send)
154
+
155
+ # We always close to not have to handle connection management.
156
+ self.close()
157
+
158
+ assert status is not None
159
+ return HTTP2Response(
160
+ status=status,
161
+ headers=headers,
162
+ request_url=self._request_url,
163
+ data=bytes(data),
164
+ )
165
+
166
+ def close(self) -> None:
167
+ with self._h2_conn as h2_conn:
168
+ try:
169
+ h2_conn.close_connection()
170
+ if data := h2_conn.data_to_send():
171
+ self.sock.sendall(data)
172
+ except Exception:
173
+ pass
174
+
175
+ # Reset all our HTTP/2 connection state.
176
+ self._h2_conn = self._new_h2_conn()
177
+ self._h2_stream = None
178
+ self._h2_headers = []
179
+
180
+ super().close()
181
+
182
+
183
+ class HTTP2Response(BaseHTTPResponse):
184
+ # TODO: This is a woefully incomplete response object, but works for non-streaming.
185
+ def __init__(
186
+ self,
187
+ status: int,
188
+ headers: HTTPHeaderDict,
189
+ request_url: str,
190
+ data: bytes,
191
+ decode_content: bool = False, # TODO: support decoding
192
+ ) -> None:
193
+ super().__init__(
194
+ status=status,
195
+ headers=headers,
196
+ # Following CPython, we map HTTP versions to major * 10 + minor integers
197
+ version=20,
198
+ # No reason phrase in HTTP/2
199
+ reason=None,
200
+ decode_content=decode_content,
201
+ request_url=request_url,
202
+ )
203
+ self._data = data
204
+ self.length_remaining = 0
205
+
206
+ @property
207
+ def data(self) -> bytes:
208
+ return self._data
209
+
210
+ def get_redirect_location(self) -> None:
211
+ return None
212
+
213
+ def close(self) -> None:
214
+ pass
215
+
216
+
217
+ def inject_into_urllib3() -> None:
218
+ HTTPSConnectionPool.ConnectionCls = HTTP2Connection
219
+ urllib3.connection.HTTPSConnection = HTTP2Connection # type: ignore[misc]
220
+
221
+ # TODO: Offer 'http/1.1' as well, but for testing purposes this is handy.
222
+ urllib3.util.ssl_.ALPN_PROTOCOLS = ["h2"]
223
+
224
+
225
+ def extract_from_urllib3() -> None:
226
+ HTTPSConnectionPool.ConnectionCls = orig_HTTPSConnection
227
+ urllib3.connection.HTTPSConnection = orig_HTTPSConnection # type: ignore[misc]
228
+
229
+ urllib3.util.ssl_.ALPN_PROTOCOLS = ["http/1.1"]
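
A sketch of how this experimental module is intended to be switched on, assuming the optional ``h2`` dependency is installed and the target origin (a placeholder URL here) negotiates HTTP/2 over ALPN:

import urllib3
import urllib3.http2

urllib3.http2.inject_into_urllib3()       # swap in HTTP2Connection, advertise only "h2"
try:
    resp = urllib3.request("GET", "https://example.org/")
    # version is reported as 20 (major * 10 + minor), mirroring HTTP2Response above
    print(resp.status, resp.version)
finally:
    urllib3.http2.extract_from_urllib3()  # restore HTTP/1.1 connection classes and ALPN
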
venv/lib/python3.10/site-packages/urllib3/poolmanager.py ADDED
@@ -0,0 +1,638 @@
1
+ from __future__ import annotations
2
+
3
+ import functools
4
+ import logging
5
+ import typing
6
+ import warnings
7
+ from types import TracebackType
8
+ from urllib.parse import urljoin
9
+
10
+ from ._collections import HTTPHeaderDict, RecentlyUsedContainer
11
+ from ._request_methods import RequestMethods
12
+ from .connection import ProxyConfig
13
+ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
14
+ from .exceptions import (
15
+ LocationValueError,
16
+ MaxRetryError,
17
+ ProxySchemeUnknown,
18
+ URLSchemeUnknown,
19
+ )
20
+ from .response import BaseHTTPResponse
21
+ from .util.connection import _TYPE_SOCKET_OPTIONS
22
+ from .util.proxy import connection_requires_http_tunnel
23
+ from .util.retry import Retry
24
+ from .util.timeout import Timeout
25
+ from .util.url import Url, parse_url
26
+
27
+ if typing.TYPE_CHECKING:
28
+ import ssl
29
+ from typing import Literal
30
+
31
+ __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
32
+
33
+
34
+ log = logging.getLogger(__name__)
35
+
36
+ SSL_KEYWORDS = (
37
+ "key_file",
38
+ "cert_file",
39
+ "cert_reqs",
40
+ "ca_certs",
41
+ "ca_cert_data",
42
+ "ssl_version",
43
+ "ssl_minimum_version",
44
+ "ssl_maximum_version",
45
+ "ca_cert_dir",
46
+ "ssl_context",
47
+ "key_password",
48
+ "server_hostname",
49
+ )
50
+ # Default value for `blocksize` - a new parameter introduced to
51
+ # http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7
52
+ _DEFAULT_BLOCKSIZE = 16384
53
+
54
+ _SelfT = typing.TypeVar("_SelfT")
55
+
56
+
57
+ class PoolKey(typing.NamedTuple):
58
+ """
59
+ All known keyword arguments that could be provided to the pool manager, its
60
+ pools, or the underlying connections.
61
+
62
+ All custom key schemes should include the fields in this key at a minimum.
63
+ """
64
+
65
+ key_scheme: str
66
+ key_host: str
67
+ key_port: int | None
68
+ key_timeout: Timeout | float | int | None
69
+ key_retries: Retry | bool | int | None
70
+ key_block: bool | None
71
+ key_source_address: tuple[str, int] | None
72
+ key_key_file: str | None
73
+ key_key_password: str | None
74
+ key_cert_file: str | None
75
+ key_cert_reqs: str | None
76
+ key_ca_certs: str | None
77
+ key_ca_cert_data: str | bytes | None
78
+ key_ssl_version: int | str | None
79
+ key_ssl_minimum_version: ssl.TLSVersion | None
80
+ key_ssl_maximum_version: ssl.TLSVersion | None
81
+ key_ca_cert_dir: str | None
82
+ key_ssl_context: ssl.SSLContext | None
83
+ key_maxsize: int | None
84
+ key_headers: frozenset[tuple[str, str]] | None
85
+ key__proxy: Url | None
86
+ key__proxy_headers: frozenset[tuple[str, str]] | None
87
+ key__proxy_config: ProxyConfig | None
88
+ key_socket_options: _TYPE_SOCKET_OPTIONS | None
89
+ key__socks_options: frozenset[tuple[str, str]] | None
90
+ key_assert_hostname: bool | str | None
91
+ key_assert_fingerprint: str | None
92
+ key_server_hostname: str | None
93
+ key_blocksize: int | None
94
+
95
+
96
+ def _default_key_normalizer(
97
+ key_class: type[PoolKey], request_context: dict[str, typing.Any]
98
+ ) -> PoolKey:
99
+ """
100
+ Create a pool key out of a request context dictionary.
101
+
102
+ According to RFC 3986, both the scheme and host are case-insensitive.
103
+ Therefore, this function normalizes both before constructing the pool
104
+ key for an HTTPS request. If you wish to change this behaviour, provide
105
+ alternate callables to ``key_fn_by_scheme``.
106
+
107
+ :param key_class:
108
+ The class to use when constructing the key. This should be a namedtuple
109
+ with the ``scheme`` and ``host`` keys at a minimum.
110
+ :type key_class: namedtuple
111
+ :param request_context:
112
+ A dictionary-like object that contains the context for a request.
113
+ :type request_context: dict
114
+
115
+ :return: A namedtuple that can be used as a connection pool key.
116
+ :rtype: PoolKey
117
+ """
118
+ # Since we mutate the dictionary, make a copy first
119
+ context = request_context.copy()
120
+ context["scheme"] = context["scheme"].lower()
121
+ context["host"] = context["host"].lower()
122
+
123
+ # These are both dictionaries and need to be transformed into frozensets
124
+ for key in ("headers", "_proxy_headers", "_socks_options"):
125
+ if key in context and context[key] is not None:
126
+ context[key] = frozenset(context[key].items())
127
+
128
+ # The socket_options key may be a list and needs to be transformed into a
129
+ # tuple.
130
+ socket_opts = context.get("socket_options")
131
+ if socket_opts is not None:
132
+ context["socket_options"] = tuple(socket_opts)
133
+
134
+ # Map the kwargs to the names in the namedtuple - this is necessary since
135
+ # namedtuples can't have fields starting with '_'.
136
+ for key in list(context.keys()):
137
+ context["key_" + key] = context.pop(key)
138
+
139
+ # Default to ``None`` for keys missing from the context
140
+ for field in key_class._fields:
141
+ if field not in context:
142
+ context[field] = None
143
+
144
+ # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context
145
+ if context.get("key_blocksize") is None:
146
+ context["key_blocksize"] = _DEFAULT_BLOCKSIZE
147
+
148
+ return key_class(**context)
149
+
150
+
151
+ #: A dictionary that maps a scheme to a callable that creates a pool key.
152
+ #: This can be used to alter the way pool keys are constructed, if desired.
153
+ #: Each PoolManager makes a copy of this dictionary so they can be configured
154
+ #: globally here, or individually on the instance.
155
+ key_fn_by_scheme = {
156
+ "http": functools.partial(_default_key_normalizer, PoolKey),
157
+ "https": functools.partial(_default_key_normalizer, PoolKey),
158
+ }
159
+
160
+ pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
161
+
162
+
163
+ class PoolManager(RequestMethods):
164
+ """
165
+ Allows for arbitrary requests while transparently keeping track of
166
+ necessary connection pools for you.
167
+
168
+ :param num_pools:
169
+ Number of connection pools to cache before discarding the least
170
+ recently used pool.
171
+
172
+ :param headers:
173
+ Headers to include with all requests, unless other headers are given
174
+ explicitly.
175
+
176
+ :param \\**connection_pool_kw:
177
+ Additional parameters are used to create fresh
178
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
179
+
180
+ Example:
181
+
182
+ .. code-block:: python
183
+
184
+ import urllib3
185
+
186
+ http = urllib3.PoolManager(num_pools=2)
187
+
188
+ resp1 = http.request("GET", "https://google.com/")
189
+ resp2 = http.request("GET", "https://google.com/mail")
190
+ resp3 = http.request("GET", "https://yahoo.com/")
191
+
192
+ print(len(http.pools))
193
+ # 2
194
+
195
+ """
196
+
197
+ proxy: Url | None = None
198
+ proxy_config: ProxyConfig | None = None
199
+
200
+ def __init__(
201
+ self,
202
+ num_pools: int = 10,
203
+ headers: typing.Mapping[str, str] | None = None,
204
+ **connection_pool_kw: typing.Any,
205
+ ) -> None:
206
+ super().__init__(headers)
207
+ self.connection_pool_kw = connection_pool_kw
208
+
209
+ self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
210
+ self.pools = RecentlyUsedContainer(num_pools)
211
+
212
+ # Locally set the pool classes and keys so other PoolManagers can
213
+ # override them.
214
+ self.pool_classes_by_scheme = pool_classes_by_scheme
215
+ self.key_fn_by_scheme = key_fn_by_scheme.copy()
216
+
217
+ def __enter__(self: _SelfT) -> _SelfT:
218
+ return self
219
+
220
+ def __exit__(
221
+ self,
222
+ exc_type: type[BaseException] | None,
223
+ exc_val: BaseException | None,
224
+ exc_tb: TracebackType | None,
225
+ ) -> Literal[False]:
226
+ self.clear()
227
+ # Return False to re-raise any potential exceptions
228
+ return False
229
+
230
+ def _new_pool(
231
+ self,
232
+ scheme: str,
233
+ host: str,
234
+ port: int,
235
+ request_context: dict[str, typing.Any] | None = None,
236
+ ) -> HTTPConnectionPool:
237
+ """
238
+ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
239
+ any additional pool keyword arguments.
240
+
241
+ If ``request_context`` is provided, it is provided as keyword arguments
242
+ to the pool class used. This method is used to actually create the
243
+ connection pools handed out by :meth:`connection_from_url` and
244
+ companion methods. It is intended to be overridden for customization.
245
+ """
246
+ pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme]
247
+ if request_context is None:
248
+ request_context = self.connection_pool_kw.copy()
249
+
250
+ # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
251
+ # set to 'None' in the request_context.
252
+ if request_context.get("blocksize") is None:
253
+ request_context["blocksize"] = _DEFAULT_BLOCKSIZE
254
+
255
+ # Although the context has everything necessary to create the pool,
256
+ # this function has historically only used the scheme, host, and port
257
+ # in the positional args. When an API change is acceptable these can
258
+ # be removed.
259
+ for key in ("scheme", "host", "port"):
260
+ request_context.pop(key, None)
261
+
262
+ if scheme == "http":
263
+ for kw in SSL_KEYWORDS:
264
+ request_context.pop(kw, None)
265
+
266
+ return pool_cls(host, port, **request_context)
267
+
268
+ def clear(self) -> None:
269
+ """
270
+ Empty our store of pools and direct them all to close.
271
+
272
+ This will not affect in-flight connections, but they will not be
273
+ re-used after completion.
274
+ """
275
+ self.pools.clear()
276
+
277
+ def connection_from_host(
278
+ self,
279
+ host: str | None,
280
+ port: int | None = None,
281
+ scheme: str | None = "http",
282
+ pool_kwargs: dict[str, typing.Any] | None = None,
283
+ ) -> HTTPConnectionPool:
284
+ """
285
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
286
+
287
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
288
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
289
+ provided, it is merged with the instance's ``connection_pool_kw``
290
+ variable and used to create the new connection pool, if one is
291
+ needed.
292
+ """
293
+
294
+ if not host:
295
+ raise LocationValueError("No host specified.")
296
+
297
+ request_context = self._merge_pool_kwargs(pool_kwargs)
298
+ request_context["scheme"] = scheme or "http"
299
+ if not port:
300
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
301
+ request_context["port"] = port
302
+ request_context["host"] = host
303
+
304
+ return self.connection_from_context(request_context)
305
+
306
+ def connection_from_context(
307
+ self, request_context: dict[str, typing.Any]
308
+ ) -> HTTPConnectionPool:
309
+ """
310
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
311
+
312
+ ``request_context`` must at least contain the ``scheme`` key and its
313
+ value must be a key in ``key_fn_by_scheme`` instance variable.
314
+ """
315
+ if "strict" in request_context:
316
+ warnings.warn(
317
+ "The 'strict' parameter is no longer needed on Python 3+. "
318
+ "This will raise an error in urllib3 v2.1.0.",
319
+ DeprecationWarning,
320
+ )
321
+ request_context.pop("strict")
322
+
323
+ scheme = request_context["scheme"].lower()
324
+ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
325
+ if not pool_key_constructor:
326
+ raise URLSchemeUnknown(scheme)
327
+ pool_key = pool_key_constructor(request_context)
328
+
329
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
330
+
331
+ def connection_from_pool_key(
332
+ self, pool_key: PoolKey, request_context: dict[str, typing.Any]
333
+ ) -> HTTPConnectionPool:
334
+ """
335
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
336
+
337
+ ``pool_key`` should be a namedtuple that only contains immutable
338
+ objects. At a minimum it must have the ``scheme``, ``host``, and
339
+ ``port`` fields.
340
+ """
341
+ with self.pools.lock:
342
+ # If the scheme, host, or port doesn't match existing open
343
+ # connections, open a new ConnectionPool.
344
+ pool = self.pools.get(pool_key)
345
+ if pool:
346
+ return pool
347
+
348
+ # Make a fresh ConnectionPool of the desired type
349
+ scheme = request_context["scheme"]
350
+ host = request_context["host"]
351
+ port = request_context["port"]
352
+ pool = self._new_pool(scheme, host, port, request_context=request_context)
353
+ self.pools[pool_key] = pool
354
+
355
+ return pool
356
+
357
+ def connection_from_url(
358
+ self, url: str, pool_kwargs: dict[str, typing.Any] | None = None
359
+ ) -> HTTPConnectionPool:
360
+ """
361
+ Similar to :func:`urllib3.connectionpool.connection_from_url`.
362
+
363
+ If ``pool_kwargs`` is not provided and a new pool needs to be
364
+ constructed, ``self.connection_pool_kw`` is used to initialize
365
+ the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
366
+ is provided, it is used instead. Note that if a new pool does not
367
+ need to be created for the request, the provided ``pool_kwargs`` are
368
+ not used.
369
+ """
370
+ u = parse_url(url)
371
+ return self.connection_from_host(
372
+ u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
373
+ )
374
+
375
+ def _merge_pool_kwargs(
376
+ self, override: dict[str, typing.Any] | None
377
+ ) -> dict[str, typing.Any]:
378
+ """
379
+ Merge a dictionary of override values for self.connection_pool_kw.
380
+
381
+ This does not modify self.connection_pool_kw and returns a new dict.
382
+ Any keys in the override dictionary with a value of ``None`` are
383
+ removed from the merged dictionary.
384
+ """
385
+ base_pool_kwargs = self.connection_pool_kw.copy()
386
+ if override:
387
+ for key, value in override.items():
388
+ if value is None:
389
+ try:
390
+ del base_pool_kwargs[key]
391
+ except KeyError:
392
+ pass
393
+ else:
394
+ base_pool_kwargs[key] = value
395
+ return base_pool_kwargs
396
+
397
+ def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool:
398
+ """
399
+ Indicates if the proxy requires the complete destination URL in the
400
+ request. Normally this is only needed when not using an HTTP CONNECT
401
+ tunnel.
402
+ """
403
+ if self.proxy is None:
404
+ return False
405
+
406
+ return not connection_requires_http_tunnel(
407
+ self.proxy, self.proxy_config, parsed_url.scheme
408
+ )
409
+
410
+ def urlopen( # type: ignore[override]
411
+ self, method: str, url: str, redirect: bool = True, **kw: typing.Any
412
+ ) -> BaseHTTPResponse:
413
+ """
414
+ Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
415
+ with custom cross-host redirect logic and only sends the request-uri
416
+ portion of the ``url``.
417
+
418
+ The given ``url`` parameter must be absolute, such that an appropriate
419
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
420
+ """
421
+ u = parse_url(url)
422
+
423
+ if u.scheme is None:
424
+ warnings.warn(
425
+ "URLs without a scheme (ie 'https://') are deprecated and will raise an error "
426
+ "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs "
427
+ "start with 'https://' or 'http://'. Read more in this issue: "
428
+ "https://github.com/urllib3/urllib3/issues/2920",
429
+ category=DeprecationWarning,
430
+ stacklevel=2,
431
+ )
432
+
433
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
434
+
435
+ kw["assert_same_host"] = False
436
+ kw["redirect"] = False
437
+
438
+ if "headers" not in kw:
439
+ kw["headers"] = self.headers
440
+
441
+ if self._proxy_requires_url_absolute_form(u):
442
+ response = conn.urlopen(method, url, **kw)
443
+ else:
444
+ response = conn.urlopen(method, u.request_uri, **kw)
445
+
446
+ redirect_location = redirect and response.get_redirect_location()
447
+ if not redirect_location:
448
+ return response
449
+
450
+ # Support relative URLs for redirecting.
451
+ redirect_location = urljoin(url, redirect_location)
452
+
453
+ if response.status == 303:
454
+ # Change the method according to RFC 9110, Section 15.4.4.
455
+ method = "GET"
456
+ # And lose the body not to transfer anything sensitive.
457
+ kw["body"] = None
458
+ kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
459
+
460
+ retries = kw.get("retries")
461
+ if not isinstance(retries, Retry):
462
+ retries = Retry.from_int(retries, redirect=redirect)
463
+
464
+ # Strip headers marked as unsafe to forward to the redirected location.
465
+ # Check remove_headers_on_redirect to avoid a potential network call within
466
+ # conn.is_same_host() which may use socket.gethostbyname() in the future.
467
+ if retries.remove_headers_on_redirect and not conn.is_same_host(
468
+ redirect_location
469
+ ):
470
+ new_headers = kw["headers"].copy()
471
+ for header in kw["headers"]:
472
+ if header.lower() in retries.remove_headers_on_redirect:
473
+ new_headers.pop(header, None)
474
+ kw["headers"] = new_headers
475
+
476
+ try:
477
+ retries = retries.increment(method, url, response=response, _pool=conn)
478
+ except MaxRetryError:
479
+ if retries.raise_on_redirect:
480
+ response.drain_conn()
481
+ raise
482
+ return response
483
+
484
+ kw["retries"] = retries
485
+ kw["redirect"] = redirect
486
+
487
+ log.info("Redirecting %s -> %s", url, redirect_location)
488
+
489
+ response.drain_conn()
490
+ return self.urlopen(method, redirect_location, **kw)
491
+
492
+
493
+ class ProxyManager(PoolManager):
494
+ """
495
+ Behaves just like :class:`PoolManager`, but sends all requests through
496
+ the defined proxy, using the CONNECT method for HTTPS URLs.
497
+
498
+ :param proxy_url:
499
+ The URL of the proxy to be used.
500
+
501
+ :param proxy_headers:
502
+ A dictionary containing headers that will be sent to the proxy. In case
503
+ of HTTP they are being sent with each request, while in the
504
+ HTTPS/CONNECT case they are sent only once. Could be used for proxy
505
+ authentication.
506
+
507
+ :param proxy_ssl_context:
508
+ The proxy SSL context is used to establish the TLS connection to the
509
+ proxy when using HTTPS proxies.
510
+
511
+ :param use_forwarding_for_https:
512
+ (Defaults to False) If set to True will forward requests to the HTTPS
513
+ proxy to be made on behalf of the client instead of creating a TLS
514
+ tunnel via the CONNECT method. **Enabling this flag means that request
515
+ and response headers and content will be visible from the HTTPS proxy**
516
+ whereas tunneling keeps request and response headers and content
517
+ private. IP address, target hostname, SNI, and port are always visible
518
+ to an HTTPS proxy even when this flag is disabled.
519
+
520
+ :param proxy_assert_hostname:
521
+ The hostname of the certificate to verify against.
522
+
523
+ :param proxy_assert_fingerprint:
524
+ The fingerprint of the certificate to verify against.
525
+
526
+ Example:
527
+
528
+ .. code-block:: python
529
+
530
+ import urllib3
531
+
532
+ proxy = urllib3.ProxyManager("https://localhost:3128/")
533
+
534
+ resp1 = proxy.request("GET", "https://google.com/")
535
+ resp2 = proxy.request("GET", "https://httpbin.org/")
536
+
537
+ print(len(proxy.pools))
538
+ # 1
539
+
540
+ resp3 = proxy.request("GET", "https://httpbin.org/")
541
+ resp4 = proxy.request("GET", "https://twitter.com/")
542
+
543
+ print(len(proxy.pools))
544
+ # 3
545
+
546
+ """
547
+
548
+ def __init__(
549
+ self,
550
+ proxy_url: str,
551
+ num_pools: int = 10,
552
+ headers: typing.Mapping[str, str] | None = None,
553
+ proxy_headers: typing.Mapping[str, str] | None = None,
554
+ proxy_ssl_context: ssl.SSLContext | None = None,
555
+ use_forwarding_for_https: bool = False,
556
+ proxy_assert_hostname: None | str | Literal[False] = None,
557
+ proxy_assert_fingerprint: str | None = None,
558
+ **connection_pool_kw: typing.Any,
559
+ ) -> None:
560
+ if isinstance(proxy_url, HTTPConnectionPool):
561
+ str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}"
562
+ else:
563
+ str_proxy_url = proxy_url
564
+ proxy = parse_url(str_proxy_url)
565
+
566
+ if proxy.scheme not in ("http", "https"):
567
+ raise ProxySchemeUnknown(proxy.scheme)
568
+
569
+ if not proxy.port:
570
+ port = port_by_scheme.get(proxy.scheme, 80)
571
+ proxy = proxy._replace(port=port)
572
+
573
+ self.proxy = proxy
574
+ self.proxy_headers = proxy_headers or {}
575
+ self.proxy_ssl_context = proxy_ssl_context
576
+ self.proxy_config = ProxyConfig(
577
+ proxy_ssl_context,
578
+ use_forwarding_for_https,
579
+ proxy_assert_hostname,
580
+ proxy_assert_fingerprint,
581
+ )
582
+
583
+ connection_pool_kw["_proxy"] = self.proxy
584
+ connection_pool_kw["_proxy_headers"] = self.proxy_headers
585
+ connection_pool_kw["_proxy_config"] = self.proxy_config
586
+
587
+ super().__init__(num_pools, headers, **connection_pool_kw)
588
+
589
+ def connection_from_host(
590
+ self,
591
+ host: str | None,
592
+ port: int | None = None,
593
+ scheme: str | None = "http",
594
+ pool_kwargs: dict[str, typing.Any] | None = None,
595
+ ) -> HTTPConnectionPool:
596
+ if scheme == "https":
597
+ return super().connection_from_host(
598
+ host, port, scheme, pool_kwargs=pool_kwargs
599
+ )
600
+
601
+ return super().connection_from_host(
602
+ self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr]
603
+ )
604
+
605
+ def _set_proxy_headers(
606
+ self, url: str, headers: typing.Mapping[str, str] | None = None
607
+ ) -> typing.Mapping[str, str]:
608
+ """
609
+ Sets headers needed by proxies: specifically, the Accept and Host
610
+ headers. Only sets headers not provided by the user.
611
+ """
612
+ headers_ = {"Accept": "*/*"}
613
+
614
+ netloc = parse_url(url).netloc
615
+ if netloc:
616
+ headers_["Host"] = netloc
617
+
618
+ if headers:
619
+ headers_.update(headers)
620
+ return headers_
621
+
622
+ def urlopen( # type: ignore[override]
623
+ self, method: str, url: str, redirect: bool = True, **kw: typing.Any
624
+ ) -> BaseHTTPResponse:
625
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
626
+ u = parse_url(url)
627
+ if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
628
+ # For connections using HTTP CONNECT, httplib sets the necessary
629
+ # headers on the CONNECT to the proxy. If we're not using CONNECT,
630
+ # we'll definitely need to set 'Host' at the very least.
631
+ headers = kw.get("headers", self.headers)
632
+ kw["headers"] = self._set_proxy_headers(url, headers)
633
+
634
+ return super().urlopen(method, url, redirect=redirect, **kw)
635
+
636
+
637
+ def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager:
638
+ return ProxyManager(proxy_url=url, **kw)
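
A brief sketch of the pooling behaviour the docstrings above describe, using placeholder URLs; one pool is kept per distinct ``PoolKey`` (scheme, host, port, and the other normalized keyword arguments):

import urllib3

http = urllib3.PoolManager(num_pools=5, headers={"User-Agent": "example/1.0"})
http.request("GET", "https://example.com/")
http.request("GET", "https://example.com/other")   # same scheme/host/port -> same pool
http.request("GET", "https://example.org/")        # different host -> a second pool
print(len(http.pools))                             # 2

# ProxyManager / proxy_from_url route plain-HTTP requests through the proxy's own
# pool and tunnel HTTPS per destination via CONNECT, as connection_from_host() shows.
# Constructing the manager does not connect; the proxy address is a placeholder.
proxy = urllib3.proxy_from_url("http://localhost:3128", num_pools=5)
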