applied-ai-018 committed on
Commit
a577093
·
verified ·
1 Parent(s): 5268297

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpts/universal/global_step80/zero/15.input_layernorm.weight/exp_avg.pt +3 -0
  2. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py +24 -0
  3. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py +258 -0
  4. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py +412 -0
  5. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py +150 -0
  6. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py +148 -0
  7. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py +369 -0
  8. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py +43 -0
  9. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py +143 -0
  10. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py +52 -0
  11. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py +1498 -0
  12. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py +252 -0
  13. venv/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py +82 -0
  14. venv/lib/python3.10/site-packages/networkx/algorithms/boundary.py +167 -0
  15. venv/lib/python3.10/site-packages/networkx/algorithms/bridges.py +205 -0
  16. venv/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py +155 -0
  17. venv/lib/python3.10/site-packages/networkx/algorithms/chordal.py +442 -0
  18. venv/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py +162 -0
  19. venv/lib/python3.10/site-packages/networkx/algorithms/covering.py +142 -0
  20. venv/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py +951 -0
  21. venv/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py +238 -0
  22. venv/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py +48 -0
  23. venv/lib/python3.10/site-packages/networkx/algorithms/isolate.py +107 -0
  24. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py +7 -0
  25. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc +0 -0
  27. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc +0 -0
  28. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc +0 -0
  29. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc +0 -0
  30. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc +0 -0
  31. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc +0 -0
  33. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc +0 -0
  34. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py +1163 -0
  35. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py +248 -0
  36. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py +1065 -0
  37. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py +351 -0
  38. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py +304 -0
  39. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py +0 -0
  40. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  41. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc +0 -0
  42. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc +0 -0
  43. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc +0 -0
  44. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc +0 -0
  45. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc +0 -0
  46. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc +0 -0
  47. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc +0 -0
  48. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc +0 -0
  49. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc +0 -0
  50. venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 +0 -0
ckpts/universal/global_step80/zero/15.input_layernorm.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d03f5cc7d1db3d15cf919ab6fba361ee207dd5c5712b31271caf19bc2d00e779
3
+ size 9372
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Approximations of graph properties and Heuristic methods for optimization.
2
+
3
+ The functions in this class are not imported into the top-level ``networkx``
4
+ namespace so the easiest way to use them is with::
5
+
6
+ >>> from networkx.algorithms import approximation
7
+
8
+ Another option is to import the specific function with
9
+ ``from networkx.algorithms.approximation import function_name``.
10
+
11
+ """
12
+ from networkx.algorithms.approximation.clustering_coefficient import *
13
+ from networkx.algorithms.approximation.clique import *
14
+ from networkx.algorithms.approximation.connectivity import *
15
+ from networkx.algorithms.approximation.distance_measures import *
16
+ from networkx.algorithms.approximation.dominating_set import *
17
+ from networkx.algorithms.approximation.kcomponents import *
18
+ from networkx.algorithms.approximation.matching import *
19
+ from networkx.algorithms.approximation.ramsey import *
20
+ from networkx.algorithms.approximation.steinertree import *
21
+ from networkx.algorithms.approximation.traveling_salesman import *
22
+ from networkx.algorithms.approximation.treewidth import *
23
+ from networkx.algorithms.approximation.vertex_cover import *
24
+ from networkx.algorithms.approximation.maxcut import *
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py ADDED
@@ -0,0 +1,258 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing large cliques and maximum independent sets."""
2
+ import networkx as nx
3
+ from networkx.algorithms.approximation import ramsey
4
+ from networkx.utils import not_implemented_for
5
+
6
+ __all__ = [
7
+ "clique_removal",
8
+ "max_clique",
9
+ "large_clique_size",
10
+ "maximum_independent_set",
11
+ ]
12
+
13
+
14
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def maximum_independent_set(G):
    """Return an approximate maximum independent set of *G*.

    An independent set is a set of nodes no two of which are adjacent.
    Finding a maximum independent set is NP-hard; this function runs the
    clique-removal approximation of Boppana and Halldorsson, which gives
    an $O(|V|/(\\log|V|)^2)$ approximation in the worst case.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    Returns
    -------
    set
        An approximately maximum independent set of nodes.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    Boppana, R., & Halldorsson, M. M. (1992). Approximating maximum
    independent sets by excluding subgraphs. BIT 32(2), 180-196.
    """
    # clique_removal returns (largest independent set, list of cliques);
    # only the independent set is needed here.
    independent_set, _cliques = clique_removal(G)
    return independent_set
69
+
70
+
71
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def max_clique(G):
    r"""Return an approximate maximum clique of *G*.

    A clique is a set of nodes every pair of which is adjacent. This
    function gives an $O(|V|/(\log|V|)^2)$ approximation in the worst
    case, via the clique-removal algorithm applied to the complement
    graph: an independent set in the complement of *G* is exactly a
    clique in *G*.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    Returns
    -------
    set
        An approximately maximum clique of the graph.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    Boppana, R., & Halldorsson, M. M. (1992). Approximating maximum
    independent sets by excluding subgraphs. BIT 32(2), 180-196.
    doi:10.1007/BF01994876
    """
    # A maximum clique in G is a maximum independent set in complement(G).
    independent_set, _ = clique_removal(nx.complement(G))
    return independent_set
128
+
129
+
130
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def clique_removal(G):
    r"""Repeatedly strip Ramsey cliques from *G*.

    Runs ``ramsey.ramsey_R2`` on a working copy of the graph, removes the
    clique it finds, and repeats until the graph is empty. Yields an
    $O(|V|/(\log |V|)^2)$ approximation of both maximum clique and
    maximum independent set.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    Returns
    -------
    (set, list)
        The largest independent set seen across all rounds, and the list
        of maximal cliques (sets) removed along the way.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    Boppana, R., & Halldorsson, M. M. (1992). Approximating maximum
    independent sets by excluding subgraphs. BIT 32(2), 180-196.
    """
    working = G.copy()
    clique, iset = ramsey.ramsey_R2(working)
    found_cliques = [clique]
    found_isets = [iset]
    # Keep peeling off the clique found in each round until no nodes remain.
    while working:
        working.remove_nodes_from(clique)
        clique, iset = ramsey.ramsey_R2(working)
        if clique:
            found_cliques.append(clique)
        if iset:
            found_isets.append(iset)
    # The answer is the largest independent set by cardinality.
    return max(found_isets, key=len), found_cliques
181
+
182
+
183
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def large_clique_size(G):
    """Heuristically find the size of a large clique in *G*.

    Greedy heuristic from Pattabiraman et al.: from each sufficiently
    high-degree node, grow a clique by repeatedly absorbing the
    highest-degree remaining candidate. Worst-case time is
    :math:`O(n d^2)` for *n* nodes and maximum degree *d*. There is no
    approximation guarantee on the returned value.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    int
        The size of a large clique in the graph.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    Pattabiraman, Bharath, et al. "Fast Algorithms for the Maximum
    Clique Problem on Massive Graphs with Applications to Overlapping
    Community Detection." Internet Mathematics 11.4-5 (2015): 421-448.
    <https://doi.org/10.1080/15427951.2014.986778>

    See also
    --------
    :func:`networkx.algorithms.approximation.clique.max_clique`
    :mod:`networkx.algorithms.clique`
    """
    degree_of = G.degree

    def _extend(candidates, depth, current_best):
        # Greedy descent: always branch on the highest-degree candidate,
        # then keep only its neighbors that could still beat current_best.
        if not candidates:
            return max(current_best, depth)
        pick = max(candidates, key=degree_of)
        candidates.remove(pick)
        survivors = {w for w in G[pick] if degree_of[w] >= current_best}
        return _extend(candidates & survivors, depth + 1, current_best)

    best = 0
    for node in G:
        # Re-check against the *current* best so later, low-degree start
        # nodes are pruned as the bound improves.
        if degree_of[node] < best:
            continue
        seed = {w for w in G[node] if degree_of[w] >= best}
        best = _extend(seed, 1, best)
    return best
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py ADDED
@@ -0,0 +1,412 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Fast approximation for node connectivity
2
+ """
3
+ import itertools
4
+ from operator import itemgetter
5
+
6
+ import networkx as nx
7
+
8
+ __all__ = [
9
+ "local_node_connectivity",
10
+ "node_connectivity",
11
+ "all_pairs_node_connectivity",
12
+ ]
13
+
14
+
15
@nx._dispatchable(name="approximate_local_node_connectivity")
def local_node_connectivity(G, source, target, cutoff=None):
    """Approximate the node connectivity between *source* and *target*.

    Local node connectivity between two distinct, nonadjacent nodes is
    the minimum number of nodes that must be removed to disconnect them;
    by Menger's theorem it equals the number of node-independent paths.
    This is the fast White-Newman approximation: repeatedly find a
    shortest path by bidirectional BFS, mark its nodes as used, and
    search again excluding used nodes. The result is a strict lower
    bound on the true connectivity.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node.

    target : node
        Ending node.

    cutoff : integer, optional
        Maximum node connectivity to consider. If None, the minimum
        degree of source or target acts as the cutoff.

    Returns
    -------
    int
        Approximate pairwise node connectivity.

    See also
    --------
    all_pairs_node_connectivity
    node_connectivity

    References
    ----------
    White, Douglas R., and Mark Newman. 2001. A Fast Algorithm for
    Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
    http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    if source == target:
        raise nx.NetworkXError("source and target have to be different nodes.")

    # The degree of either endpoint caps the number of independent paths.
    if G.is_directed():
        upper_bound = min(G.out_degree(source), G.in_degree(target))
    else:
        upper_bound = min(G.degree(source), G.degree(target))

    if not upper_bound:
        return 0

    limit = float("inf") if cutoff is None else cutoff

    used = set()
    paths_found = 0
    for _ in range(min(upper_bound, limit)):
        try:
            found = _bidirectional_shortest_path(G, source, target, used)
        except nx.NetworkXNoPath:
            break
        # Nodes on this path may not be reused by later paths.
        used.update(found)
        paths_found += 1

    return paths_found
109
+
110
+
111
@nx._dispatchable(name="approximate_node_connectivity")
def node_connectivity(G, s=None, t=None):
    r"""Approximate the node connectivity of a graph or digraph *G*.

    Node connectivity is the minimum number of nodes whose removal
    disconnects G or renders it trivial. With *s* and *t* given, the
    local connectivity between those two nodes is returned instead.
    Built on the White-Newman approximation, so the result is a strict
    lower bound on the true connectivity. Works for directed and
    undirected graphs.

    Parameters
    ----------
    G : NetworkX graph

    s : node, optional
        Source node (default None).

    t : node, optional
        Target node (default None).

    Returns
    -------
    int
        Node connectivity of G, or local node connectivity if *s* and
        *t* are provided.

    See also
    --------
    all_pairs_node_connectivity
    local_node_connectivity

    References
    ----------
    White, Douglas R., and Mark Newman. 2001. A Fast Algorithm for
    Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
    http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    # s and t must be given together or not at all.
    if (s is None) != (t is None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local connectivity case.
    if s is not None:
        for node in (s, t):
            if node not in G:
                raise nx.NetworkXError(f"node {node} not in graph")
        return local_node_connectivity(G, s, t)

    # Global case: pick direction-aware helpers.
    if G.is_directed():
        connected = nx.is_weakly_connected
        pair_iter = itertools.permutations

        def adjacent(node):
            return itertools.chain(G.predecessors(node), G.successors(node))

    else:
        connected = nx.is_connected
        pair_iter = itertools.combinations
        adjacent = G.neighbors

    if not connected(G):
        return 0

    # Start from a minimum-degree node; its degree bounds the answer.
    anchor, minimum_degree = min(G.degree(), key=itemgetter(1))
    K = minimum_degree

    # Tighten K against every node not adjacent to the anchor...
    for other in set(G) - set(adjacent(anchor)) - {anchor}:
        K = min(K, local_node_connectivity(G, anchor, other, cutoff=K))

    # ...and against every nonadjacent pair of the anchor's neighbors.
    for x, y in pair_iter(adjacent(anchor), 2):
        if x != y and y not in G[x]:
            K = min(K, local_node_connectivity(G, x, y, cutoff=K))

    return K
215
+
216
+
217
@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
    """Approximate node connectivity between all pairs of nodes.

    Computes the White-Newman lower-bound approximation of local node
    connectivity for every ordered (directed) or unordered (undirected)
    pair of nodes, optionally restricted to the nodes in *nbunch*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch : container, optional
        If provided, connectivity is computed only over pairs of nodes
        in this container.

    cutoff : integer, optional
        Maximum node connectivity to consider; if None the minimum
        degree of each pair acts as the cutoff.

    Returns
    -------
    dict
        Dictionary keyed by source then target of pairwise node
        connectivity values.

    See Also
    --------
    local_node_connectivity
    node_connectivity

    References
    ----------
    White, Douglas R., and Mark Newman. 2001. A Fast Algorithm for
    Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
    http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    # Iterating the graph itself yields its nodes.
    targets = G if nbunch is None else set(nbunch)

    directed = G.is_directed()
    # Directed graphs need both orderings; undirected pairs are symmetric.
    pair_iter = itertools.permutations if directed else itertools.combinations

    connectivity = {node: {} for node in targets}
    for u, v in pair_iter(targets, 2):
        value = local_node_connectivity(G, u, v, cutoff=cutoff)
        connectivity[u][v] = value
        if not directed:
            connectivity[v][u] = value

    return connectivity
295
+
296
+
297
def _bidirectional_shortest_path(G, source, target, exclude):
    """Shortest path from *source* to *target* avoiding nodes in *exclude*.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for the path.

    target : node
        Ending node for the path.

    exclude : container
        Nodes to ignore during the search.

    Returns
    -------
    list
        Shortest path between source and target ignoring nodes in
        'exclude'.

    Raises
    ------
    NetworkXNoPath
        If there is no path, or if the nodes are adjacent and have only
        one path between them.

    Notes
    -----
    Adapted from networkx.algorithms.shortest_paths.unweighted with the
    extra 'exclude' parameter for nodes already consumed by other paths.

    References
    ----------
    White, Douglas R., and Mark Newman. 2001. A Fast Algorithm for
    Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
    http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    # The helper returns the two BFS trees and the node where they met.
    pred, succ, meet = _bidirectional_pred_succ(G, source, target, exclude)

    # Walk predecessor links back from the meeting node to the source,
    # then flip so the path reads source -> meet.
    path = []
    node = meet
    while node is not None:
        path.append(node)
        node = pred[node]
    path.reverse()

    # Extend forward along successor links from meet to the target.
    node = succ[path[-1]]
    while node is not None:
        path.append(node)
        node = succ[node]

    return path
358
+
359
+
360
+ def _bidirectional_pred_succ(G, source, target, exclude):
361
+ # does BFS from both source and target and meets in the middle
362
+ # excludes nodes in the container "exclude" from the search
363
+
364
+ # handle either directed or undirected
365
+ if G.is_directed():
366
+ Gpred = G.predecessors
367
+ Gsucc = G.successors
368
+ else:
369
+ Gpred = G.neighbors
370
+ Gsucc = G.neighbors
371
+
372
+ # predecessor and successors in search
373
+ pred = {source: None}
374
+ succ = {target: None}
375
+
376
+ # initialize fringes, start with forward
377
+ forward_fringe = [source]
378
+ reverse_fringe = [target]
379
+
380
+ level = 0
381
+
382
+ while forward_fringe and reverse_fringe:
383
+ # Make sure that we iterate one step forward and one step backwards
384
+ # thus source and target will only trigger "found path" when they are
385
+ # adjacent and then they can be safely included in the container 'exclude'
386
+ level += 1
387
+ if level % 2 != 0:
388
+ this_level = forward_fringe
389
+ forward_fringe = []
390
+ for v in this_level:
391
+ for w in Gsucc(v):
392
+ if w in exclude:
393
+ continue
394
+ if w not in pred:
395
+ forward_fringe.append(w)
396
+ pred[w] = v
397
+ if w in succ:
398
+ return pred, succ, w # found path
399
+ else:
400
+ this_level = reverse_fringe
401
+ reverse_fringe = []
402
+ for v in this_level:
403
+ for w in Gpred(v):
404
+ if w in exclude:
405
+ continue
406
+ if w not in succ:
407
+ succ[w] = v
408
+ reverse_fringe.append(w)
409
+ if w in pred:
410
+ return pred, succ, w # found path
411
+
412
+ raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Distance measures approximated metrics."""
2
+
3
+ import networkx as nx
4
+ from networkx.utils.decorators import py_random_state
5
+
6
+ __all__ = ["diameter"]
7
+
8
+
9
@py_random_state(1)
@nx._dispatchable(name="approximate_diameter")
def diameter(G, seed=None):
    """Returns a lower bound on the diameter of the graph G.

    For an undirected graph the `2-sweep` algorithm [1]_ is used: pick
    the farthest node from a random node and return its eccentricity.
    For a directed graph the `2-dSweep` variant [2]_ is used: from a
    random source perform a forward and a backward BFS, then return the
    better of the backward eccentricity of the farthest forward node and
    the forward eccentricity of the farthest backward node.  Both run in
    time linear in the size of G.

    Parameters
    ----------
    G : NetworkX graph

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    d : integer
        Lower Bound on the Diameter of G

    Examples
    --------
    >>> G = nx.path_graph(10)  # undirected graph
    >>> nx.diameter(G)
    9
    >>> G = nx.cycle_graph(3, create_using=nx.DiGraph)  # directed graph
    >>> nx.diameter(G)
    2

    Raises
    ------
    NetworkXError
        If the graph is empty, or undirected and not connected, or
        directed and not strongly connected.

    See Also
    --------
    networkx.algorithms.distance_measures.diameter

    References
    ----------
    .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
       *Fast computation of empirically tight bounds for the diameter of
       massive graphs.* Journal of Experimental Algorithmics (JEA), 2009.
       https://arxiv.org/pdf/0904.2728.pdf
    .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino.
       *On computing the diameter of real-world directed (weighted) graphs.*
       International Symposium on Experimental Algorithms, 2012.
       https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
    """
    # Guard: the diameter of an empty graph is undefined.
    if not G:
        raise nx.NetworkXError("Expected non-empty NetworkX graph!")
    # A single node trivially has eccentricity 0.
    if G.number_of_nodes() == 1:
        return 0
    # Dispatch to the sweep variant matching the graph's directedness.
    two_sweep = _two_sweep_directed if G.is_directed() else _two_sweep_undirected
    return two_sweep(G, seed)
85
+
86
+
87
def _two_sweep_undirected(G, seed):
    """Lower-bound the diameter of an undirected graph via one 2-sweep.

    A BFS from a random source finds a farthest node; that node's
    eccentricity is a valid lower bound on the diameter.

    ``G`` is a NetworkX undirected graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    source = seed.choice(list(G))
    # BFS distances from the random source.
    distances = nx.shortest_path_length(G, source)
    # An incomplete BFS tree means the graph is disconnected.
    if len(distances) != len(G):
        raise nx.NetworkXError("Graph not connected.")
    # dicts preserve insertion order, so the last key inserted is (one of)
    # the farthest nodes from the source.
    *_, farthest = distances
    return nx.eccentricity(G, farthest)
111
+
112
+
113
def _two_sweep_directed(G, seed):
    """Lower-bound the diameter of a directed graph via 2-dSweep.

    Steps:
    1. Select a random source node $s$.
    2. Forward BFS from $s$; let $a_1$ be a farthest node and $LB_1$ its
       backward eccentricity.
    3. Backward BFS from $s$; let $a_2$ be a farthest node and $LB_2$ its
       forward eccentricity.
    4. Return ``max(LB_1, LB_2)``.

    ``G`` is a NetworkX directed graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # Reverse view used for all "backward" computations.
    G_reversed = G.reverse()
    source = seed.choice(list(G))
    forward_distances = nx.shortest_path_length(G, source)
    backward_distances = nx.shortest_path_length(G_reversed, source)
    # Strong connectivity requires the source to reach every node and
    # every node to reach the source.
    n = len(G)
    if len(forward_distances) != n or len(backward_distances) != n:
        raise nx.NetworkXError("DiGraph not strongly connected.")
    # Last-inserted keys are farthest nodes (dicts keep insertion order).
    *_, a_1 = forward_distances
    *_, a_2 = backward_distances
    # Best of: backward eccentricity of a_1, forward eccentricity of a_2.
    return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2))
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for finding node and edge dominating sets.
2
+
3
+ A `dominating set`_ for an undirected graph *G* with vertex set *V*
4
+ and edge set *E* is a subset *D* of *V* such that every vertex not in
5
+ *D* is adjacent to at least one member of *D*. An `edge dominating set`_
6
+ is a subset *F* of *E* such that every edge not in *F* is
7
+ incident to an endpoint of at least one edge in *F*.
8
+
9
+ .. _dominating set: https://en.wikipedia.org/wiki/Dominating_set
10
+ .. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set
11
+
12
+ """
13
+ import networkx as nx
14
+
15
+ from ...utils import not_implemented_for
16
+ from ..matching import maximal_matching
17
+
18
+ __all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]
19
+
20
+
21
# TODO Why doesn't this algorithm work for directed graphs?
@not_implemented_for("directed")
@nx._dispatchable(node_attrs="weight")
def min_weighted_dominating_set(G, weight=None):
    r"""Returns a dominating set that approximates the minimum weight node
    dominating set.

    Greedy set-cover-style heuristic: repeatedly pick the most
    cost-effective node (weight divided by number of newly covered
    vertices) until every vertex is dominated.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    weight : string
        The node attribute storing the weight of an node.  If provided,
        the node attribute with this key must be a number for each node.
        If not provided, each node is assumed to have weight one.

    Returns
    -------
    min_weight_dominating_set : set
        A set of nodes, the sum of whose weights is no more than `(\log
        w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of
        each node in the graph and `w(V^*)` denotes the sum of the
        weights of each node in the minimum weight dominating set.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
    >>> nx.approximation.min_weighted_dominating_set(G)
    {1, 2, 4}

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Notes
    -----
    Runs in $O(m)$ time, where $m$ is the number of edges in the graph.

    References
    ----------
    .. [1] Vazirani, Vijay V. *Approximation Algorithms*.
       Springer Science & Business Media, 2001.
    """
    # The unique dominating set for the null graph is the empty set.
    if len(G) == 0:
        return set()

    # Accumulates the greedy solution.
    dom_set = set()

    def _cost(node_and_neighborhood):
        """Cost-effectiveness of picking a node: its weight divided by the
        number of still-uncovered vertices its closed neighborhood adds.

        ``node_and_neighborhood`` is a ``(node, closed_neighborhood)`` pair.
        """
        v, neighborhood = node_and_neighborhood
        return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set)

    # Vertices not yet dominated by the current solution.
    vertices = set(G)
    # Closed neighborhood (node plus its neighbors) of every node.
    neighborhoods = {v: {v} | set(G[v]) for v in G}

    # Greedily cover until every vertex is dominated.
    while vertices:
        dom_node, min_set = min(neighborhoods.items(), key=_cost)
        dom_set.add(dom_node)
        del neighborhoods[dom_node]
        vertices -= min_set

    return dom_set
113
+
114
+
115
@nx._dispatchable
def min_edge_dominating_set(G):
    r"""Returns minimum cardinality edge dominating set.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    min_edge_dominating_set : set
        Returns a set of dominating edges whose size is no more than 2 * OPT.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> nx.approximation.min_edge_dominating_set(G)
    {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)}

    Raises
    ------
    ValueError
        If the input graph `G` is empty.

    Notes
    -----
    Any maximal matching is an edge dominating set whose cardinality is
    at most 2 * OPT, so this runs in $O(|E|)$ time.
    """
    if not G:
        raise ValueError("Expected non-empty NetworkX graph!")
    return maximal_matching(G)
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py ADDED
@@ -0,0 +1,369 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Fast approximation for k-component structure
2
+ """
3
+ import itertools
4
+ from collections import defaultdict
5
+ from collections.abc import Mapping
6
+ from functools import cached_property
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms.approximation import local_node_connectivity
10
+ from networkx.exception import NetworkXError
11
+ from networkx.utils import not_implemented_for
12
+
13
+ __all__ = ["k_components"]
14
+
15
+
16
@not_implemented_for("directed")
@nx._dispatchable(name="approximate_k_components")
def k_components(G, min_density=0.95):
    r"""Returns the approximate k-component structure of a graph G.

    A `k`-component is a maximal subgraph of G with node connectivity at
    least `k`: at least `k` nodes must be removed to disconnect it.
    `k`-components are nested by connectivity level, so the result is a
    hierarchy.  This implementation follows the fast heuristics of [1]_,
    built on the White--Newman approximation of node-independent paths
    [2]_: exact components are computed for `k` in {1, 2}, then for each
    higher `k` the search is narrowed to biconnected parts of the
    `k`-core (Whitney's theorem guarantees every `k`-component lies
    inside a `k`-core), where pairwise approximate connectivities are
    collected in a complement-graph structure (`_AntiGraph`) to save
    memory.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    min_density : Float
        Density relaxation threshold. Default value 0.95

    Returns
    -------
    k_components : dict
        Dictionary with connectivity level `k` as key and a list of
        sets of nodes that form a k-component of level `k` as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> # Petersen graph has 10 nodes and it is triconnected, thus all
    >>> # nodes are in a single component on all three connectivity levels
    >>> from networkx.algorithms import approximation as apxa
    >>> G = nx.petersen_graph()
    >>> k_components = apxa.k_components(G)

    See also
    --------
    k_components

    References
    ----------
    .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
        Visualization and Heuristics for Fast Computation.
        https://arxiv.org/pdf/1503.04476v1

    .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind

    .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
        A hierarchical conception of social groups.
        American Sociological Review 68(1), 103--28.
        https://doi.org/10.2307/3088904
    """
    # Connectivity level (k) -> list of node sets forming k-components.
    k_components = defaultdict(list)
    # Local aliases for speed inside the nested loops.
    node_connectivity = local_node_connectivity
    k_core = nx.k_core
    core_number = nx.core_number
    biconnected_components = nx.biconnected_components
    combinations = itertools.combinations
    # Exact solutions for k in {1, 2}.  (A linear-time triconnectivity
    # algorithm would let the heuristic start at k = 4.)
    for component in nx.connected_components(G):
        comp = set(component)
        if len(comp) > 1:  # isolated nodes have connectivity 0
            k_components[1].append(comp)
    for bicomponent in nx.biconnected_components(G):
        bicomp = set(bicomponent)
        if len(bicomp) > 2:  # a dyad is not a proper bicomponent
            k_components[2].append(bicomp)
    # No k-component can exceed the maximum core number because
    # \kappa(G) <= \lambda(G) <= \delta(G).
    g_cnumber = core_number(G)
    max_core = max(g_cnumber.values())
    for k in range(3, max_core + 1):
        C = k_core(G, k, core_number=g_cnumber)
        for nodes in biconnected_components(C):
            # Only biconnected pieces of the k-core can host k-components.
            if len(nodes) < k:
                continue
            SG = G.subgraph(nodes)
            # Auxiliary complement graph: join only the pairs whose
            # approximate local connectivity is below k.
            H = _AntiGraph()
            H.add_nodes_from(SG.nodes())
            for u, v in combinations(SG, 2):
                K = node_connectivity(SG, u, v, cutoff=k)
                if k > K:
                    H.add_edge(u, v)
            for h_nodes in biconnected_components(H):
                if len(h_nodes) <= k:
                    continue
                SH = H.subgraph(h_nodes)
                for Gc in _cliques_heuristic(SG, SH, k, min_density):
                    for k_nodes in biconnected_components(Gc):
                        Gk = nx.k_core(SG.subgraph(k_nodes), k)
                        if len(Gk) <= k:
                            continue
                        k_components[k].append(set(Gk))
    return k_components
157
+
158
+
159
def _cliques_heuristic(G, H, k, min_density):
    """Yield dense candidate subgraphs of ``G`` for the k-component search.

    Candidates are taken per core level of the auxiliary complement graph
    ``H`` (highest level first) and peeled of minimum-degree nodes until
    their core numbers are uniform and their density reaches
    ``min_density``.
    """
    h_cnumber = nx.core_number(H)
    for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
        cands = {n for n, c in h_cnumber.items() if c == c_value}
        # The highest core value never needs the overlap check.
        if i == 0:
            overlap = False
        else:
            # Nodes outside the candidate set adjacent (in H) to *every*
            # candidate.
            overlap = set.intersection(
                *[{x for x in H[n] if x not in cands} for n in cands]
            )
        if overlap and len(overlap) < k:
            SH = H.subgraph(cands | overlap)
        else:
            SH = H.subgraph(cands)
        sh_cnumber = nx.core_number(SH)
        SG = nx.k_core(G.subgraph(SH), k)
        # Peel minimum-degree nodes until SH has uniform core numbers and
        # is dense enough; the while/else only yields when the loop ended
        # without hitting ``break``.
        while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
            # This subgraph must be writable => .copy()
            SH = H.subgraph(SG).copy()
            if len(SH) <= k:
                break
            sh_cnumber = nx.core_number(SH)
            sh_deg = dict(SH.degree())
            min_deg = min(sh_deg.values())
            SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
            SG = nx.k_core(G.subgraph(SH), k)
        else:
            yield SG
188
+
189
+
190
+ def _same(measure, tol=0):
191
+ vals = set(measure.values())
192
+ if (max(vals) - min(vals)) <= tol:
193
+ return True
194
+ return False
195
+
196
+
197
class _AntiGraph(nx.Graph):
    """
    Class for complement graphs.

    Stores only the edges that are *absent* from the dense graph it
    represents, keeping the memory footprint low for big, dense graphs.
    The reporting methods (``neighbors``, ``degree``, ``adjacency``,
    item access) answer as if the dense graph were stored, so instances
    work with the NetworkX functions this module needs: k-core,
    connected_components and biconnected_components.
    """

    # Every implicit edge of the dense graph shares this attribute dict.
    all_edge_dict = {"weight": 1}

    def single_edge_dict(self):
        return self.all_edge_dict

    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]

    def __getitem__(self, n):
        """Returns a dict of neighbors of node n in the dense graph.

        Parameters
        ----------
        n : node
            A node in the graph.

        Returns
        -------
        adj_dict : dictionary
            The adjacency dictionary for nodes connected to n.

        """
        all_edge_dict = self.all_edge_dict
        # Dense neighbors = every node except n and its stored (absent) edges.
        return {
            node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
        }

    def neighbors(self, n):
        """Returns an iterator over all neighbors of node n in the
        dense graph.
        """
        try:
            return iter(set(self._adj) - set(self._adj[n]) - {n})
        except KeyError as err:
            raise NetworkXError(f"The node {n} is not in the graph.") from err

    class AntiAtlasView(Mapping):
        """An adjacency inner dict for AntiGraph"""

        def __init__(self, graph, node):
            self._graph = graph
            self._atlas = graph._adj[node]
            self._node = node

        def __len__(self):
            # Dense degree: all other nodes minus the stored non-edges.
            return len(self._graph) - len(self._atlas) - 1

        def __iter__(self):
            return (n for n in self._graph if n not in self._atlas and n != self._node)

        def __getitem__(self, nbr):
            nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
            if nbr in nbrs:
                return self._graph.all_edge_dict
            raise KeyError(nbr)

    class AntiAdjacencyView(AntiAtlasView):
        """An adjacency outer dict for AntiGraph"""

        def __init__(self, graph):
            self._graph = graph
            self._atlas = graph._adj

        def __len__(self):
            return len(self._atlas)

        def __iter__(self):
            return iter(self._graph)

        def __getitem__(self, node):
            if node not in self._graph:
                raise KeyError(node)
            return self._graph.AntiAtlasView(self._graph, node)

    @cached_property
    def adj(self):
        return self.AntiAdjacencyView(self)

    def subgraph(self, nodes):
        """This subgraph method returns a full AntiGraph. Not a View"""
        nodes = set(nodes)
        G = _AntiGraph()
        G.add_nodes_from(nodes)
        # Copy over the stored non-edges restricted to the induced nodes.
        for n in G:
            Gnbrs = G.adjlist_inner_dict_factory()
            G._adj[n] = Gnbrs
            for nbr, d in self._adj[n].items():
                if nbr in G._adj:
                    Gnbrs[nbr] = d
                    G._adj[nbr][n] = d
        G.graph = self.graph
        return G

    class AntiDegreeView(nx.reportviews.DegreeView):
        def __iter__(self):
            all_nodes = set(self._succ)
            for n in self._nodes:
                nbrs = all_nodes - set(self._succ[n]) - {n}
                yield (n, len(nbrs))

        def __getitem__(self, n):
            nbrs = set(self._succ) - set(self._succ[n]) - {n}
            # AntiGraph is a ThinGraph so all edges have weight 1
            return len(nbrs) + (n in nbrs)

    @cached_property
    def degree(self):
        """Returns an iterator for (node, degree) and degree for single node.

        The node degree is the number of edges adjacent to the node,
        reported for the dense graph.

        Parameters
        ----------
        nbunch : iterable container, optional (default=all nodes)
            A container of nodes. The container will be iterated
            through once.

        weight : string or None, optional (default=None)
            The edge attribute that holds the numerical value used
            as a weight. If None, then each edge has weight 1.
            The degree is the sum of the edge weights adjacent to the node.

        Returns
        -------
        deg:
            Degree of the node, if a single node is passed as argument.
        nd_iter : an iterator
            The iterator returns two-tuples of (node, degree).

        See Also
        --------
        degree

        Examples
        --------
        >>> G = nx.path_graph(4)
        >>> G.degree(0)  # node 0 with degree 1
        1
        >>> list(G.degree([0, 1]))
        [(0, 1), (1, 2)]

        """
        return self.AntiDegreeView(self)

    def adjacency(self):
        """Returns an iterator of (node, adjacency set) tuples for all nodes
        in the dense graph.

        This is the fastest way to look at every edge.
        For directed graphs, only outgoing adjacencies are included.

        Returns
        -------
        adj_iter : iterator
            An iterator of (node, adjacency set) for all nodes in
            the graph.

        """
        for n in self._adj:
            yield (n, set(self._adj) - set(self._adj[n]) - {n})
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ **************
3
+ Graph Matching
4
+ **************
5
+
6
+ Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
7
+ edges; that is, no two edges share a common vertex.
8
+
9
+ `Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
10
+ """
11
+ import networkx as nx
12
+
13
+ __all__ = ["min_maximal_matching"]
14
+
15
+
16
@nx._dispatchable
def min_maximal_matching(G):
    r"""Returns the minimum maximal matching of G. That is, out of all maximal
    matchings of the graph G, the smallest is returned.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    min_maximal_matching : set
        Returns a set of edges such that no two edges share a common endpoint
        and every edge not in the set shares some common endpoint in the set.
        Cardinality will be 2*OPT in the worst case.

    Notes
    -----
    Any maximal matching is within a factor of 2 of a minimum maximal
    cardinality matching [1]_, so a single greedy pass suffices.
    Runtime is $O(|E|)$.

    References
    ----------
    .. [1] Vazirani, Vijay Approximation Algorithms (2001)
    """
    return nx.maximal_matching(G)
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.utils.decorators import not_implemented_for, py_random_state
3
+
4
+ __all__ = ["randomized_partitioning", "one_exchange"]
5
+
6
+
7
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(1)
@nx._dispatchable(edge_attrs="weight")
def randomized_partitioning(G, seed=None, p=0.5, weight=None):
    """Compute a random partitioning of the graph nodes and its cut value.

    Each node joins the first partition independently with probability
    `p`; the returned cut value is the sum of weights of edges crossing
    the two partitions.

    Parameters
    ----------
    G : NetworkX graph

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    p : scalar
        Probability for each node to be part of the first partition.
        Should be in [0,1]

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    cut_size : scalar
        Value of the minimum cut.

    partition : pair of node sets
        A partitioning of the nodes that defines a minimum cut.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1)
    >>> cut_size
    6
    >>> partition
    ({0, 3, 4}, {1, 2})

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.
    """
    # Independently assign each node to the first side with probability p.
    first_side = {node for node in G.nodes() if seed.random() < p}
    size = nx.algorithms.cut_size(G, first_side, weight=weight)
    return size, (first_side, G.nodes - first_side)
61
+
62
+
63
+ def _swap_node_partition(cut, node):
64
+ return cut - {node} if node in cut else cut.union({node})
65
+
66
+
67
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight")
def one_exchange(G, initial_cut=None, seed=None, weight=None):
    """Compute a partitioning of the graphs nodes and the corresponding cut value.

    Greedy one-exchange local search: repeatedly toggle the single node
    whose move improves the cut value the most, stopping at a local
    maximum.

    Parameters
    ----------
    G : networkx Graph
        Graph to find a maximum cut for.

    initial_cut : set
        Cut to use as a starting point. If not supplied the algorithm
        starts with an empty cut.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    cut_value : scalar
        Value of the maximum cut.

    partition : pair of node sets
        A partitioning of the nodes that defines a maximum cut.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1)
    >>> curr_cut_size
    6
    >>> partition
    ({0, 2}, {1, 3, 4})

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.
    """
    cut = set(initial_cut) if initial_cut is not None else set()
    current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
    while True:
        nodes = list(G.nodes())
        # Shuffling the nodes ensures random tie-breaks in the following call to max
        seed.shuffle(nodes)
        best_node_to_swap = max(
            nodes,
            key=lambda v: nx.algorithms.cut_size(
                G, _swap_node_partition(cut, v), weight=weight
            ),
            default=None,
        )
        potential_cut = _swap_node_partition(cut, best_node_to_swap)
        potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight)

        # Stop as soon as no single swap strictly improves the cut.
        if potential_cut_size <= current_cut_size:
            break
        cut, current_cut_size = potential_cut, potential_cut_size

    return current_cut_size, (cut, G.nodes - cut)
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Ramsey numbers.
3
+ """
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ from ...utils import arbitrary_element
8
+
9
+ __all__ = ["ramsey_R2"]
10
+
11
+
12
+ @not_implemented_for("directed")
13
+ @not_implemented_for("multigraph")
14
+ @nx._dispatchable
15
+ def ramsey_R2(G):
16
+ r"""Compute the largest clique and largest independent set in `G`.
17
+
18
+ This can be used to estimate bounds for the 2-color
19
+ Ramsey number `R(2;s,t)` for `G`.
20
+
21
+ This is a recursive implementation which could run into trouble
22
+ for large recursions. Note that self-loop edges are ignored.
23
+
24
+ Parameters
25
+ ----------
26
+ G : NetworkX graph
27
+ Undirected graph
28
+
29
+ Returns
30
+ -------
31
+ max_pair : (set, set) tuple
32
+ Maximum clique, Maximum independent set.
33
+
34
+ Raises
35
+ ------
36
+ NetworkXNotImplemented
37
+ If the graph is directed or is a multigraph.
38
+ """
39
+ if not G:
40
+ return set(), set()
41
+
42
+ node = arbitrary_element(G)
43
+ nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node)
44
+ nnbrs = nx.non_neighbors(G, node)
45
+ c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy())
46
+ c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy())
47
+
48
+ c_1.add(node)
49
+ i_2.add(node)
50
+ # Choose the larger of the two cliques and the larger of the two
51
+ # independent sets, according to cardinality.
52
+ return max(c_1, c_2, key=len), max(i_1, i_2, key=len)
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py ADDED
@@ -0,0 +1,1498 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ =================================
3
+ Travelling Salesman Problem (TSP)
4
+ =================================
5
+
6
+ Implementation of approximate algorithms
7
+ for solving and approximating the TSP problem.
8
+
9
+ Categories of algorithms which are implemented:
10
+
11
+ - Christofides (provides a 3/2-approximation of TSP)
12
+ - Greedy
13
+ - Simulated Annealing (SA)
14
+ - Threshold Accepting (TA)
15
+ - Asadpour Asymmetric Traveling Salesman Algorithm
16
+
17
+ The Travelling Salesman Problem tries to find, given the weight
18
+ (distance) between all points where a salesman has to visit, the
19
+ route so that:
20
+
21
+ - The total distance (cost) which the salesman travels is minimized.
22
+ - The salesman returns to the starting point.
23
+ - Note that for a complete graph, the salesman visits each point once.
24
+
25
+ The function `travelling_salesman_problem` allows for incomplete
26
+ graphs by finding all-pairs shortest paths, effectively converting
27
+ the problem to a complete graph problem. It calls one of the
28
+ approximate methods on that problem and then converts the result
29
+ back to the original graph using the previously found shortest paths.
30
+
31
+ TSP is an NP-hard problem in combinatorial optimization,
32
+ important in operations research and theoretical computer science.
33
+
34
+ http://en.wikipedia.org/wiki/Travelling_salesman_problem
35
+ """
36
+ import math
37
+
38
+ import networkx as nx
39
+ from networkx.algorithms.tree.mst import random_spanning_tree
40
+ from networkx.utils import not_implemented_for, pairwise, py_random_state
41
+
42
+ __all__ = [
43
+ "traveling_salesman_problem",
44
+ "christofides",
45
+ "asadpour_atsp",
46
+ "greedy_tsp",
47
+ "simulated_annealing_tsp",
48
+ "threshold_accepting_tsp",
49
+ ]
50
+
51
+
52
+ def swap_two_nodes(soln, seed):
53
+ """Swap two nodes in `soln` to give a neighbor solution.
54
+
55
+ Parameters
56
+ ----------
57
+ soln : list of nodes
58
+ Current cycle of nodes
59
+
60
+ seed : integer, random_state, or None (default)
61
+ Indicator of random number generation state.
62
+ See :ref:`Randomness<randomness>`.
63
+
64
+ Returns
65
+ -------
66
+ list
67
+ The solution after move is applied. (A neighbor solution.)
68
+
69
+ Notes
70
+ -----
71
+ This function assumes that the incoming list `soln` is a cycle
72
+ (that the first and last element are the same) and also that
73
+ we don't want any move to change the first node in the list
74
+ (and thus not the last node either).
75
+
76
+ The input list is changed as well as returned. Make a copy if needed.
77
+
78
+ See Also
79
+ --------
80
+ move_one_node
81
+ """
82
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
83
+ soln[a], soln[b] = soln[b], soln[a]
84
+ return soln
85
+
86
+
87
+ def move_one_node(soln, seed):
88
+ """Move one node to another position to give a neighbor solution.
89
+
90
+ The node to move and the position to move to are chosen randomly.
91
+ The first and last nodes are left untouched as soln must be a cycle
92
+ starting at that node.
93
+
94
+ Parameters
95
+ ----------
96
+ soln : list of nodes
97
+ Current cycle of nodes
98
+
99
+ seed : integer, random_state, or None (default)
100
+ Indicator of random number generation state.
101
+ See :ref:`Randomness<randomness>`.
102
+
103
+ Returns
104
+ -------
105
+ list
106
+ The solution after move is applied. (A neighbor solution.)
107
+
108
+ Notes
109
+ -----
110
+ This function assumes that the incoming list `soln` is a cycle
111
+ (that the first and last element are the same) and also that
112
+ we don't want any move to change the first node in the list
113
+ (and thus not the last node either).
114
+
115
+ The input list is changed as well as returned. Make a copy if needed.
116
+
117
+ See Also
118
+ --------
119
+ swap_two_nodes
120
+ """
121
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
122
+ soln.insert(b, soln.pop(a))
123
+ return soln
124
+
125
+
126
+ @not_implemented_for("directed")
127
+ @nx._dispatchable(edge_attrs="weight")
128
+ def christofides(G, weight="weight", tree=None):
129
+ """Approximate a solution of the traveling salesman problem
130
+
131
+ Compute a 3/2-approximation of the traveling salesman problem
132
+ in a complete undirected graph using Christofides [1]_ algorithm.
133
+
134
+ Parameters
135
+ ----------
136
+ G : Graph
137
+ `G` should be a complete weighted undirected graph.
138
+ The distance between all pairs of nodes should be included.
139
+
140
+ weight : string, optional (default="weight")
141
+ Edge data key corresponding to the edge weight.
142
+ If any edge does not have this attribute the weight is set to 1.
143
+
144
+ tree : NetworkX graph or None (default: None)
145
+ A minimum spanning tree of G. Or, if None, the minimum spanning
146
+ tree is computed using :func:`networkx.minimum_spanning_tree`
147
+
148
+ Returns
149
+ -------
150
+ list
151
+ List of nodes in `G` along a cycle with a 3/2-approximation of
152
+ the minimal Hamiltonian cycle.
153
+
154
+ References
155
+ ----------
156
+ .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
157
+ the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
158
+ Pittsburgh Pa Management Sciences Research Group, 1976.
159
+ """
160
+ # Remove selfloops if necessary
161
+ loop_nodes = nx.nodes_with_selfloops(G)
162
+ try:
163
+ node = next(loop_nodes)
164
+ except StopIteration:
165
+ pass
166
+ else:
167
+ G = G.copy()
168
+ G.remove_edge(node, node)
169
+ G.remove_edges_from((n, n) for n in loop_nodes)
170
+ # Check that G is a complete graph
171
+ N = len(G) - 1
172
+ # This check ignores selfloops which is what we want here.
173
+ if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
174
+ raise nx.NetworkXError("G must be a complete graph.")
175
+
176
+ if tree is None:
177
+ tree = nx.minimum_spanning_tree(G, weight=weight)
178
+ L = G.copy()
179
+ L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
180
+ MG = nx.MultiGraph()
181
+ MG.add_edges_from(tree.edges)
182
+ edges = nx.min_weight_matching(L, weight=weight)
183
+ MG.add_edges_from(edges)
184
+ return _shortcutting(nx.eulerian_circuit(MG))
185
+
186
+
187
+ def _shortcutting(circuit):
188
+ """Remove duplicate nodes in the path"""
189
+ nodes = []
190
+ for u, v in circuit:
191
+ if v in nodes:
192
+ continue
193
+ if not nodes:
194
+ nodes.append(u)
195
+ nodes.append(v)
196
+ nodes.append(nodes[0])
197
+ return nodes
198
+
199
+
200
+ @nx._dispatchable(edge_attrs="weight")
201
+ def traveling_salesman_problem(
202
+ G, weight="weight", nodes=None, cycle=True, method=None, **kwargs
203
+ ):
204
+ """Find the shortest path in `G` connecting specified nodes
205
+
206
+ This function allows approximate solution to the traveling salesman
207
+ problem on networks that are not complete graphs and/or where the
208
+ salesman does not need to visit all nodes.
209
+
210
+ This function proceeds in two steps. First, it creates a complete
211
+ graph using the all-pairs shortest_paths between nodes in `nodes`.
212
+ Edge weights in the new graph are the lengths of the paths
213
+ between each pair of nodes in the original graph.
214
+ Second, an algorithm (default: `christofides` for undirected and
215
+ `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
216
+ cycle on this new graph. The available algorithms are:
217
+
218
+ - christofides
219
+ - greedy_tsp
220
+ - simulated_annealing_tsp
221
+ - threshold_accepting_tsp
222
+ - asadpour_atsp
223
+
224
+ Once the Hamiltonian Cycle is found, this function post-processes to
225
+ accommodate the structure of the original graph. If `cycle` is ``False``,
226
+ the biggest weight edge is removed to make a Hamiltonian path.
227
+ Then each edge on the new complete graph used for that analysis is
228
+ replaced by the shortest_path between those nodes on the original graph.
229
+ If the input graph `G` includes edges with weights that do not adhere to
230
+ the triangle inequality, such as when `G` is not a complete graph (i.e
231
+ length of non-existent edges is infinity), then the returned path may
232
+ contain some repeating nodes (other than the starting node).
233
+
234
+ Parameters
235
+ ----------
236
+ G : NetworkX graph
237
+ A possibly weighted graph
238
+
239
+ nodes : collection of nodes (default=G.nodes)
240
+ collection (list, set, etc.) of nodes to visit
241
+
242
+ weight : string, optional (default="weight")
243
+ Edge data key corresponding to the edge weight.
244
+ If any edge does not have this attribute the weight is set to 1.
245
+
246
+ cycle : bool (default: True)
247
+ Indicates whether a cycle should be returned, or a path.
248
+ Note: the cycle is the approximate minimal cycle.
249
+ The path simply removes the biggest edge in that cycle.
250
+
251
+ method : function (default: None)
252
+ A function that returns a cycle on all nodes and approximates
253
+ the solution to the traveling salesman problem on a complete
254
+ graph. The returned cycle is then used to find a corresponding
255
+ solution on `G`. `method` should be callable; take inputs
256
+ `G`, and `weight`; and return a list of nodes along the cycle.
257
+
258
+ Provided options include :func:`christofides`, :func:`greedy_tsp`,
259
+ :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`.
260
+
261
+ If `method is None`: use :func:`christofides` for undirected `G` and
262
+ :func:`asadpour_atsp` for directed `G`.
263
+
264
+ **kwargs : dict
265
+ Other keyword arguments to be passed to the `method` function passed in.
266
+
267
+ Returns
268
+ -------
269
+ list
270
+ List of nodes in `G` along a path with an approximation of the minimal
271
+ path through `nodes`.
272
+
273
+ Raises
274
+ ------
275
+ NetworkXError
276
+ If `G` is a directed graph it has to be strongly connected or the
277
+ complete version cannot be generated.
278
+
279
+ Examples
280
+ --------
281
+ >>> tsp = nx.approximation.traveling_salesman_problem
282
+ >>> G = nx.cycle_graph(9)
283
+ >>> G[4][5]["weight"] = 5 # all other weights are 1
284
+ >>> tsp(G, nodes=[3, 6])
285
+ [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
286
+ >>> path = tsp(G, cycle=False)
287
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
288
+ True
289
+
290
+ While no longer required, you can still build (curry) your own function
291
+ to provide parameter values to the methods.
292
+
293
+ >>> SA_tsp = nx.approximation.simulated_annealing_tsp
294
+ >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500)
295
+ >>> path = tsp(G, cycle=False, method=method)
296
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
297
+ True
298
+
299
+ Otherwise, pass other keyword arguments directly into the tsp function.
300
+
301
+ >>> path = tsp(
302
+ ... G,
303
+ ... cycle=False,
304
+ ... method=nx.approximation.simulated_annealing_tsp,
305
+ ... init_cycle="greedy",
306
+ ... temp=500,
307
+ ... )
308
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
309
+ True
310
+ """
311
+ if method is None:
312
+ if G.is_directed():
313
+ method = asadpour_atsp
314
+ else:
315
+ method = christofides
316
+ if nodes is None:
317
+ nodes = list(G.nodes)
318
+
319
+ dist = {}
320
+ path = {}
321
+ for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
322
+ dist[n] = d
323
+ path[n] = p
324
+
325
+ if G.is_directed():
326
+ # If the graph is not strongly connected, raise an exception
327
+ if not nx.is_strongly_connected(G):
328
+ raise nx.NetworkXError("G is not strongly connected")
329
+ GG = nx.DiGraph()
330
+ else:
331
+ GG = nx.Graph()
332
+ for u in nodes:
333
+ for v in nodes:
334
+ if u == v:
335
+ continue
336
+ GG.add_edge(u, v, weight=dist[u][v])
337
+
338
+ best_GG = method(GG, weight=weight, **kwargs)
339
+
340
+ if not cycle:
341
+ # find and remove the biggest edge
342
+ (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
343
+ pos = best_GG.index(u) + 1
344
+ while best_GG[pos] != v:
345
+ pos = best_GG[pos:].index(u) + 1
346
+ best_GG = best_GG[pos:-1] + best_GG[:pos]
347
+
348
+ best_path = []
349
+ for u, v in pairwise(best_GG):
350
+ best_path.extend(path[u][v][:-1])
351
+ best_path.append(v)
352
+ return best_path
353
+
354
+
355
+ @not_implemented_for("undirected")
356
+ @py_random_state(2)
357
+ @nx._dispatchable(edge_attrs="weight", mutates_input=True)
358
+ def asadpour_atsp(G, weight="weight", seed=None, source=None):
359
+ """
360
+ Returns an approximate solution to the traveling salesman problem.
361
+
362
+ This approximate solution is one of the best known approximations for the
363
+ asymmetric traveling salesman problem developed by Asadpour et al,
364
+ [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
365
+ bound for the weight of the cycle. Next, it constructs an exponential
366
+ distribution of undirected spanning trees where the probability of an
367
+ edge being in the tree corresponds to the weight of that edge using a
368
+ maximum entropy rounding scheme. Next we sample that distribution
369
+ $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
370
+ direction of the arcs is added back to the edges. Finally, we augment
371
+ then short circuit that graph to find the approximate tour for the
372
+ salesman.
373
+
374
+ Parameters
375
+ ----------
376
+ G : nx.DiGraph
377
+ The graph should be a complete weighted directed graph. The
378
+ distance between all paris of nodes should be included and the triangle
379
+ inequality should hold. That is, the direct edge between any two nodes
380
+ should be the path of least cost.
381
+
382
+ weight : string, optional (default="weight")
383
+ Edge data key corresponding to the edge weight.
384
+ If any edge does not have this attribute the weight is set to 1.
385
+
386
+ seed : integer, random_state, or None (default)
387
+ Indicator of random number generation state.
388
+ See :ref:`Randomness<randomness>`.
389
+
390
+ source : node label (default=`None`)
391
+ If given, return the cycle starting and ending at the given node.
392
+
393
+ Returns
394
+ -------
395
+ cycle : list of nodes
396
+ Returns the cycle (list of nodes) that a salesman can follow to minimize
397
+ the total weight of the trip.
398
+
399
+ Raises
400
+ ------
401
+ NetworkXError
402
+ If `G` is not complete or has less than two nodes, the algorithm raises
403
+ an exception.
404
+
405
+ NetworkXError
406
+ If `source` is not `None` and is not a node in `G`, the algorithm raises
407
+ an exception.
408
+
409
+ NetworkXNotImplemented
410
+ If `G` is an undirected graph.
411
+
412
+ References
413
+ ----------
414
+ .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
415
+ An o(log n/log log n)-approximation algorithm for the asymmetric
416
+ traveling salesman problem, Operations research, 65 (2017),
417
+ pp. 1043–1061
418
+
419
+ Examples
420
+ --------
421
+ >>> import networkx as nx
422
+ >>> import networkx.algorithms.approximation as approx
423
+ >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
424
+ >>> nx.set_edge_attributes(
425
+ ... G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight"
426
+ ... )
427
+ >>> tour = approx.asadpour_atsp(G, source=0)
428
+ >>> tour
429
+ [0, 2, 1, 0]
430
+ """
431
+ from math import ceil, exp
432
+ from math import log as ln
433
+
434
+ # Check that G is a complete graph
435
+ N = len(G) - 1
436
+ if N < 2:
437
+ raise nx.NetworkXError("G must have at least two nodes")
438
+ # This check ignores selfloops which is what we want here.
439
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
440
+ raise nx.NetworkXError("G is not a complete DiGraph")
441
+ # Check that the source vertex, if given, is in the graph
442
+ if source is not None and source not in G.nodes:
443
+ raise nx.NetworkXError("Given source node not in G.")
444
+
445
+ opt_hk, z_star = held_karp_ascent(G, weight)
446
+
447
+ # Test to see if the ascent method found an integer solution or a fractional
448
+ # solution. If it is integral then z_star is a nx.Graph, otherwise it is
449
+ # a dict
450
+ if not isinstance(z_star, dict):
451
+ # Here we are using the shortcutting method to go from the list of edges
452
+ # returned from eulerian_circuit to a list of nodes
453
+ return _shortcutting(nx.eulerian_circuit(z_star, source=source))
454
+
455
+ # Create the undirected support of z_star
456
+ z_support = nx.MultiGraph()
457
+ for u, v in z_star:
458
+ if (u, v) not in z_support.edges:
459
+ edge_weight = min(G[u][v][weight], G[v][u][weight])
460
+ z_support.add_edge(u, v, **{weight: edge_weight})
461
+
462
+ # Create the exponential distribution of spanning trees
463
+ gamma = spanning_tree_distribution(z_support, z_star)
464
+
465
+ # Write the lambda values to the edges of z_support
466
+ z_support = nx.Graph(z_support)
467
+ lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
468
+ nx.set_edge_attributes(z_support, lambda_dict, "weight")
469
+ del gamma, lambda_dict
470
+
471
+ # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
472
+ minimum_sampled_tree = None
473
+ minimum_sampled_tree_weight = math.inf
474
+ for _ in range(2 * ceil(ln(G.number_of_nodes()))):
475
+ sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
476
+ sampled_tree_weight = sampled_tree.size(weight)
477
+ if sampled_tree_weight < minimum_sampled_tree_weight:
478
+ minimum_sampled_tree = sampled_tree.copy()
479
+ minimum_sampled_tree_weight = sampled_tree_weight
480
+
481
+ # Orient the edges in that tree to keep the cost of the tree the same.
482
+ t_star = nx.MultiDiGraph()
483
+ for u, v, d in minimum_sampled_tree.edges(data=weight):
484
+ if d == G[u][v][weight]:
485
+ t_star.add_edge(u, v, **{weight: d})
486
+ else:
487
+ t_star.add_edge(v, u, **{weight: d})
488
+
489
+ # Find the node demands needed to neutralize the flow of t_star in G
490
+ node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
491
+ nx.set_node_attributes(G, node_demands, "demand")
492
+
493
+ # Find the min_cost_flow
494
+ flow_dict = nx.min_cost_flow(G, "demand")
495
+
496
+ # Build the flow into t_star
497
+ for source, values in flow_dict.items():
498
+ for target in values:
499
+ if (source, target) not in t_star.edges and values[target] > 0:
500
+ # IF values[target] > 0 we have to add that many edges
501
+ for _ in range(values[target]):
502
+ t_star.add_edge(source, target)
503
+
504
+ # Return the shortcut eulerian circuit
505
+ circuit = nx.eulerian_circuit(t_star, source=source)
506
+ return _shortcutting(circuit)
507
+
508
+
509
+ @nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
510
+ def held_karp_ascent(G, weight="weight"):
511
+ """
512
+ Minimizes the Held-Karp relaxation of the TSP for `G`
513
+
514
+ Solves the Held-Karp relaxation of the input complete digraph and scales
515
+ the output solution for use in the Asadpour [1]_ ASTP algorithm.
516
+
517
+ The Held-Karp relaxation defines the lower bound for solutions to the
518
+ ATSP, although it does return a fractional solution. This is used in the
519
+ Asadpour algorithm as an initial solution which is later rounded to a
520
+ integral tree within the spanning tree polytopes. This function solves
521
+ the relaxation with the branch and bound method in [2]_.
522
+
523
+ Parameters
524
+ ----------
525
+ G : nx.DiGraph
526
+ The graph should be a complete weighted directed graph.
527
+ The distance between all paris of nodes should be included.
528
+
529
+ weight : string, optional (default="weight")
530
+ Edge data key corresponding to the edge weight.
531
+ If any edge does not have this attribute the weight is set to 1.
532
+
533
+ Returns
534
+ -------
535
+ OPT : float
536
+ The cost for the optimal solution to the Held-Karp relaxation
537
+ z : dict or nx.Graph
538
+ A symmetrized and scaled version of the optimal solution to the
539
+ Held-Karp relaxation for use in the Asadpour algorithm.
540
+
541
+ If an integral solution is found, then that is an optimal solution for
542
+ the ATSP problem and that is returned instead.
543
+
544
+ References
545
+ ----------
546
+ .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
547
+ An o(log n/log log n)-approximation algorithm for the asymmetric
548
+ traveling salesman problem, Operations research, 65 (2017),
549
+ pp. 1043–1061
550
+
551
+ .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
552
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
553
+ pp.1138-1162
554
+ """
555
+ import numpy as np
556
+ from scipy import optimize
557
+
558
+ def k_pi():
559
+ """
560
+ Find the set of minimum 1-Arborescences for G at point pi.
561
+
562
+ Returns
563
+ -------
564
+ Set
565
+ The set of minimum 1-Arborescences
566
+ """
567
+ # Create a copy of G without vertex 1.
568
+ G_1 = G.copy()
569
+ minimum_1_arborescences = set()
570
+ minimum_1_arborescence_weight = math.inf
571
+
572
+ # node is node '1' in the Held and Karp paper
573
+ n = next(G.__iter__())
574
+ G_1.remove_node(n)
575
+
576
+ # Iterate over the spanning arborescences of the graph until we know
577
+ # that we have found the minimum 1-arborescences. My proposed strategy
578
+ # is to find the most extensive root to connect to from 'node 1' and
579
+ # the least expensive one. We then iterate over arborescences until
580
+ # the cost of the basic arborescence is the cost of the minimum one
581
+ # plus the difference between the most and least expensive roots,
582
+ # that way the cost of connecting 'node 1' will by definition not by
583
+ # minimum
584
+ min_root = {"node": None, weight: math.inf}
585
+ max_root = {"node": None, weight: -math.inf}
586
+ for u, v, d in G.edges(n, data=True):
587
+ if d[weight] < min_root[weight]:
588
+ min_root = {"node": v, weight: d[weight]}
589
+ if d[weight] > max_root[weight]:
590
+ max_root = {"node": v, weight: d[weight]}
591
+
592
+ min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
593
+ min_root[weight] = min_root[weight] + min_in_edge[2][weight]
594
+ max_root[weight] = max_root[weight] + min_in_edge[2][weight]
595
+
596
+ min_arb_weight = math.inf
597
+ for arb in nx.ArborescenceIterator(G_1):
598
+ arb_weight = arb.size(weight)
599
+ if min_arb_weight == math.inf:
600
+ min_arb_weight = arb_weight
601
+ elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
602
+ break
603
+ # We have to pick the root node of the arborescence for the out
604
+ # edge of the first vertex as that is the only node without an
605
+ # edge directed into it.
606
+ for N, deg in arb.in_degree:
607
+ if deg == 0:
608
+ # root found
609
+ arb.add_edge(n, N, **{weight: G[n][N][weight]})
610
+ arb_weight += G[n][N][weight]
611
+ break
612
+
613
+ # We can pick the minimum weight in-edge for the vertex with
614
+ # a cycle. If there are multiple edges with the same, minimum
615
+ # weight, We need to add all of them.
616
+ #
617
+ # Delete the edge (N, v) so that we cannot pick it.
618
+ edge_data = G[N][n]
619
+ G.remove_edge(N, n)
620
+ min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
621
+ min_edges = [
622
+ (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
623
+ ]
624
+ for u, v, d in min_edges:
625
+ new_arb = arb.copy()
626
+ new_arb.add_edge(u, v, **{weight: d})
627
+ new_arb_weight = arb_weight + d
628
+ # Check to see the weight of the arborescence, if it is a
629
+ # new minimum, clear all of the old potential minimum
630
+ # 1-arborescences and add this is the only one. If its
631
+ # weight is above the known minimum, do not add it.
632
+ if new_arb_weight < minimum_1_arborescence_weight:
633
+ minimum_1_arborescences.clear()
634
+ minimum_1_arborescence_weight = new_arb_weight
635
+ # We have a 1-arborescence, add it to the set
636
+ if new_arb_weight == minimum_1_arborescence_weight:
637
+ minimum_1_arborescences.add(new_arb)
638
+ G.add_edge(N, n, **edge_data)
639
+
640
+ return minimum_1_arborescences
641
+
642
+ def direction_of_ascent():
643
+ """
644
+ Find the direction of ascent at point pi.
645
+
646
+ See [1]_ for more information.
647
+
648
+ Returns
649
+ -------
650
+ dict
651
+ A mapping from the nodes of the graph which represents the direction
652
+ of ascent.
653
+
654
+ References
655
+ ----------
656
+ .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
657
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
658
+ pp.1138-1162
659
+ """
660
+ # 1. Set d equal to the zero n-vector.
661
+ d = {}
662
+ for n in G:
663
+ d[n] = 0
664
+ del n
665
+ # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
666
+ minimum_1_arborescences = k_pi()
667
+ while True:
668
+ # Reduce K(pi) to K(pi, d)
669
+ # Find the arborescence in K(pi) which increases the lest in
670
+ # direction d
671
+ min_k_d_weight = math.inf
672
+ min_k_d = None
673
+ for arborescence in minimum_1_arborescences:
674
+ weighted_cost = 0
675
+ for n, deg in arborescence.degree:
676
+ weighted_cost += d[n] * (deg - 2)
677
+ if weighted_cost < min_k_d_weight:
678
+ min_k_d_weight = weighted_cost
679
+ min_k_d = arborescence
680
+
681
+ # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
682
+ if min_k_d_weight > 0:
683
+ return d, min_k_d
684
+ # 4. d_i = d_i + v_{i, k}
685
+ for n, deg in min_k_d.degree:
686
+ d[n] += deg - 2
687
+ # Check that we do not need to terminate because the direction
688
+ # of ascent does not exist. This is done with linear
689
+ # programming.
690
+ c = np.full(len(minimum_1_arborescences), -1, dtype=int)
691
+ a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
692
+ b_eq = np.zeros(len(G) + 1, dtype=int)
693
+ b_eq[len(G)] = 1
694
+ for arb_count, arborescence in enumerate(minimum_1_arborescences):
695
+ n_count = len(G) - 1
696
+ for n, deg in arborescence.degree:
697
+ a_eq[n_count][arb_count] = deg - 2
698
+ n_count -= 1
699
+ a_eq[len(G)][arb_count] = 1
700
+ program_result = optimize.linprog(
701
+ c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
702
+ )
703
+ # If the constants exist, then the direction of ascent doesn't
704
+ if program_result.success:
705
+ # There is no direction of ascent
706
+ return None, minimum_1_arborescences
707
+
708
+ # 5. GO TO 2
709
+
710
    def find_epsilon(k, d):
        """
        Given the direction of ascent at pi, find the maximum distance we can go
        in that direction.

        Parameters
        ----------
        k : nx.DiGraph
            A minimum 1-arborescence with the minimum rate of increase in the
            direction of ascent. It is temporarily mutated while candidate
            edge substitutions are tried, and restored before each next
            candidate and before returning.

        d : dict
            The direction of ascent, mapping each node to its component of
            the ascent vector.

        Returns
        -------
        float
            The distance we can travel in direction `d` (``math.inf`` when no
            valid substitution bounds the step size).
        """
        min_epsilon = math.inf
        for e_u, e_v, e_w in G.edges(data=weight):
            if (e_u, e_v) in k.edges:
                continue
            # Now, I have found a condition which MUST be true for the edges to
            # be a valid substitute. The edge in the graph which is the
            # substitute is the one with the same terminated end. This can be
            # checked rather simply.
            #
            # Find the edge within k which is the substitute. Because k is a
            # 1-arborescence, we know that there is only one such edge
            # leading into every vertex.
            if len(k.in_edges(e_v, data=weight)) > 1:
                # Invariant violation: a 1-arborescence has at most one
                # in-edge per vertex.
                raise Exception
            sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
            # Tentatively swap the candidate edge in for the substitute.
            k.add_edge(e_u, e_v, **{weight: e_w})
            k.remove_edge(sub_u, sub_v)
            # Accept the swap only if k is still a valid 1-arborescence:
            # in-degree at most one everywhere, |V| edges, weakly connected.
            if (
                max(d for n, d in k.in_degree()) <= 1
                and len(G) == k.number_of_edges()
                and nx.is_weakly_connected(k)
            ):
                # Ascent method calculation
                if d[sub_u] == d[e_u] or sub_w == e_w:
                    # Degenerate swap (no change in direction or weight):
                    # revert to the original graph and skip it.
                    k.remove_edge(e_u, e_v)
                    k.add_edge(sub_u, sub_v, **{weight: sub_w})
                    continue
                epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
                if 0 < epsilon < min_epsilon:
                    min_epsilon = epsilon
            # Revert to the original graph
            k.remove_edge(e_u, e_v)
            k.add_edge(sub_u, sub_v, **{weight: sub_w})

        return min_epsilon
765
+
766
+ # I have to know that the elements in pi correspond to the correct elements
767
+ # in the direction of ascent, even if the node labels are not integers.
768
+ # Thus, I will use dictionaries to made that mapping.
769
+ pi_dict = {}
770
+ for n in G:
771
+ pi_dict[n] = 0
772
+ del n
773
+ original_edge_weights = {}
774
+ for u, v, d in G.edges(data=True):
775
+ original_edge_weights[(u, v)] = d[weight]
776
+ dir_ascent, k_d = direction_of_ascent()
777
+ while dir_ascent is not None:
778
+ max_distance = find_epsilon(k_d, dir_ascent)
779
+ for n, v in dir_ascent.items():
780
+ pi_dict[n] += max_distance * v
781
+ for u, v, d in G.edges(data=True):
782
+ d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
783
+ dir_ascent, k_d = direction_of_ascent()
784
+ nx._clear_cache(G)
785
+ # k_d is no longer an individual 1-arborescence but rather a set of
786
+ # minimal 1-arborescences at the maximum point of the polytope and should
787
+ # be reflected as such
788
+ k_max = k_d
789
+
790
+ # Search for a cycle within k_max. If a cycle exists, return it as the
791
+ # solution
792
+ for k in k_max:
793
+ if len([n for n in k if k.degree(n) == 2]) == G.order():
794
+ # Tour found
795
+ # TODO: this branch does not restore original_edge_weights of G!
796
+ return k.size(weight), k
797
+
798
+ # Write the original edge weights back to G and every member of k_max at
799
+ # the maximum point. Also average the number of times that edge appears in
800
+ # the set of minimal 1-arborescences.
801
+ x_star = {}
802
+ size_k_max = len(k_max)
803
+ for u, v, d in G.edges(data=True):
804
+ edge_count = 0
805
+ d[weight] = original_edge_weights[(u, v)]
806
+ for k in k_max:
807
+ if (u, v) in k.edges():
808
+ edge_count += 1
809
+ k[u][v][weight] = original_edge_weights[(u, v)]
810
+ x_star[(u, v)] = edge_count / size_k_max
811
+ # Now symmetrize the edges in x_star and scale them according to (5) in
812
+ # reference [1]
813
+ z_star = {}
814
+ scale_factor = (G.order() - 1) / G.order()
815
+ for u, v in x_star:
816
+ frequency = x_star[(u, v)] + x_star[(v, u)]
817
+ if frequency > 0:
818
+ z_star[(u, v)] = scale_factor * frequency
819
+ del x_star
820
+ # Return the optimal weight and the z dict
821
+ return next(k_max.__iter__()).size(weight), z_star
822
+
823
+
824
@nx._dispatchable
def spanning_tree_distribution(G, z):
    """
    Find the asadpour exponential distribution of spanning trees.

    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
    using the approach in section 7 to build an exponential distribution of
    undirected spanning trees.

    This algorithm ensures that the probability of any edge in a spanning
    tree is proportional to the sum of the probabilities of the trees
    containing that edge over the sum of the probabilities of all spanning
    trees of the graph.

    Parameters
    ----------
    G : nx.MultiGraph
        The undirected support graph for the Held Karp relaxation

    z : dict
        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
        solution.

    Returns
    -------
    gamma : dict
        The probability distribution which approximately preserves the marginal
        probabilities of `z`.

    Raises
    ------
    NetworkXError
        If a correction step fails to move an edge's marginal probability to
        its target value.
    """
    from math import exp
    from math import log as ln

    def q(e):
        """
        The value of q(e), as described in the Asadpour paper, is "the
        probability that edge e will be included in a spanning tree T that is
        chosen with probability proportional to exp(gamma(T))" which
        basically means that it is the total probability of the edge appearing
        across the whole distribution.

        Parameters
        ----------
        e : tuple
            The `(u, v)` tuple describing the edge we are interested in

        Returns
        -------
        float
            The probability that a spanning tree chosen according to the
            current values of gamma will include edge `e`.
        """
        # Create the laplacian matrices.  The lambda weights are refreshed on
        # every call so they reflect the current gamma values.
        for u, v, d in G.edges(data=True):
            d[lambda_key] = exp(gamma[(u, v)])
        G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key)
        # Contracting e restricts the count to spanning trees that contain e.
        G_e = nx.contracted_edge(G, e, self_loops=False)
        G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key)

        # Multiply by the weight of the contracted edge since it is not included
        # in the total weight of the contracted graph.
        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff

    # initialize gamma to the zero dict
    gamma = {}
    for u, v, _ in G.edges:
        gamma[(u, v)] = 0

    # set epsilon
    EPSILON = 0.2

    # pick an edge attribute name that is unlikely to be in the graph
    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"

    while True:
        # We need to know that no values of q_e are greater than
        # (1 + epsilon) * z_e, however changing one gamma value can increase the
        # value of a different q_e, so we have to complete the for loop without
        # changing anything for the condition to be met
        in_range_count = 0
        # Search for an edge with q_e > (1 + epsilon) * z_e
        for u, v in gamma:
            e = (u, v)
            q_e = q(e)
            z_e = z[e]
            if q_e > (1 + EPSILON) * z_e:
                # Step that should move q_e to the target (1 + EPSILON/2) * z_e
                # (cf. section 7 of the Asadpour paper).
                delta = ln(
                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
                )
                gamma[e] -= delta
                # Check that delta had the desired effect
                new_q_e = q(e)
                desired_q_e = (1 + EPSILON / 2) * z_e
                if round(new_q_e, 8) != round(desired_q_e, 8):
                    raise nx.NetworkXError(
                        f"Unable to modify probability for edge ({u}, {v})"
                    )
            else:
                in_range_count += 1
        # Check if the for loop terminated without changing any gamma
        if in_range_count == len(gamma):
            break

    # Remove the new edge attributes
    for _, _, d in G.edges(data=True):
        if lambda_key in d:
            del d[lambda_key]

    return gamma
933
+
934
+
935
@nx._dispatchable(edge_attrs="weight")
def greedy_tsp(G, weight="weight", source=None):
    """Approximate a traveling salesman cycle with a nearest-neighbor pass.

    Starting at `source`, repeatedly travel to the cheapest not-yet-visited
    node, then close the cycle back to `source`.  The result is a feasible
    (not necessarily optimal) tour, commonly used as the starting solution
    for iterative improvers such as :func:`simulated_annealing_tsp` and
    :func:`threshold_accepting_tsp`.

    Parameters
    ----------
    G : Graph
        A complete weighted graph: every pair of distinct nodes must be
        connected by an edge.

    weight : string, optional (default="weight")
        Edge data key for edge weights; edges missing the attribute count
        as weight 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    Returns
    -------
    cycle : list of nodes
        The greedy tour, as a closed walk beginning and ending at `source`.

    Raises
    ------
    NetworkXError
        If `G` is not complete.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.greedy_tsp(G, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    At each step the algorithm appends the unvisited node whose edge from
    the current node is cheapest.  Greedy construction runs in $O(|V|^2)$
    and does not guarantee an optimal tour, but its output is a useful seed
    for metaheuristic refinement.
    """
    # Completeness check; self-loops are deliberately ignored.
    expected_degree = len(G) - 1
    if any(len(nbrs) - (n in nbrs) != expected_degree for n, nbrs in G.adj.items()):
        raise nx.NetworkXError("G must be a complete graph.")

    if source is None:
        source = nx.utils.arbitrary_element(G)

    if G.number_of_nodes() == 2:
        # Only one possible tour: there and back again.
        return [source, next(G.neighbors(source)), source]

    remaining = set(G)
    remaining.remove(source)
    tour = [source]
    while remaining:
        nbrs = G[tour[-1]]
        nearest = min(remaining, key=lambda n: nbrs[n].get(weight, 1))
        tour.append(nearest)
        remaining.remove(nearest)
    tour.append(source)
    return tour
1036
+
1037
+
1038
@py_random_state(9)
@nx._dispatchable(edge_attrs="weight")
def simulated_annealing_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    temp=100,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.01,
    seed=None,
):
    """Approximate a traveling salesman tour of `G` via simulated annealing.

    Starting from `init_cycle` (or a greedy tour when ``init_cycle ==
    "greedy"``), the search repeatedly perturbs the current tour with `move`.
    A perturbation that does not increase the tour cost is always kept; one
    that increases it by ``delta`` is kept with probability
    ``exp(-delta / temp)``.  The temperature `temp` decays by the fraction
    `alpha` after each inner loop, so uphill moves become rarer over time.
    The search stops after `max_iterations` consecutive outer iterations
    without improving the best tour found, or once the temperature is no
    longer positive.

    Parameters
    ----------
    G : Graph
        A complete weighted graph (every pair of distinct nodes adjacent).

    init_cycle : list of all nodes or "greedy"
        Starting tour: a cycle visiting every node of `G` and returning to
        its first node, or the string ``"greedy"`` to construct one with
        :func:`greedy_tsp`.

    weight : string, optional (default="weight")
        Edge data key for edge weights; missing attributes count as 1.

    source : node, optional (default: first node in list(G))
        First node of the returned cycle.  Must equal ``init_cycle[0]``
        when an explicit cycle is supplied.

    temp : int, optional (default=100)
        Initial temperature.

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Neighbor-generation move: ``"1-1"`` transposes two nodes
        (:func:`swap_two_nodes`); ``"1-0"`` relocates one node
        (:func:`move_one_node`); or any callable taking ``(cycle, seed)``
        and returning a new cycle over all nodes with equal first and
        last element.

    max_iterations : int, optional (default=10)
        Consecutive non-improving outer iterations allowed before stopping.

    N_inner : int, optional (default=100)
        Number of perturbations attempted per outer iteration.

    alpha : float between (0, 1), optional (default=0.01)
        Fractional temperature decrease per outer iteration.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        The best tour found, as a closed walk starting and ending at
        `source`.

    Raises
    ------
    NetworkXError
        If `G` is not complete, or `init_cycle` is not a valid cycle over
        all nodes of `G` starting at `source`.

    See Also
    --------
    threshold_accepting_tsp

    Notes
    -----
    For $N_i$ inner and $N_o$ outer iterations the running time is
    $O(N_i * N_o * |V|)$.  See
    http://en.wikipedia.org/wiki/Simulated_annealing for background.
    """
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node

    if init_cycle == "greedy":
        # Greedy seed tour; greedy_tsp also validates completeness of G.
        cycle = greedy_tsp(G, weight=weight, source=source)
        if G.number_of_nodes() == 2:
            return cycle
    else:
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")

        # Completeness check; self-loops are deliberately ignored.
        full_degree = len(G) - 1
        if any(len(nbrs) - (n in nbrs) != full_degree for n, nbrs in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        if G.number_of_nodes() == 2:
            neighbor = next(G.neighbors(source))
            return [source, neighbor, source]

    def _tour_cost(path):
        # Total weight of a closed walk; absent weights default to 1.
        return sum(G[u][v].get(weight, 1) for u, v in pairwise(path))

    cost = _tour_cost(cycle)
    stagnant = 0
    best_cycle = cycle.copy()
    best_cost = cost
    while stagnant <= max_iterations and temp > 0:
        stagnant += 1
        for _ in range(N_inner):
            candidate = move(cycle, seed)
            candidate_cost = _tour_cost(candidate)
            delta = candidate_cost - cost
            if delta <= 0:
                # Downhill (or flat) moves are always accepted.
                cycle, cost = candidate, candidate_cost
                if cost < best_cost:
                    stagnant = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
            elif seed.random() <= math.exp(-delta / temp):
                # Uphill move accepted with the Boltzmann probability.
                cycle, cost = candidate, candidate_cost
        temp -= temp * alpha

    return best_cycle
1266
+
1267
+
1268
@py_random_state(9)
@nx._dispatchable(edge_attrs="weight")
def threshold_accepting_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    threshold=1,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.1,
    seed=None,
):
    """Approximate a traveling salesman tour of `G` by threshold accepting.

    Starting from `init_cycle` (or a greedy tour when ``init_cycle ==
    "greedy"``), the search repeatedly perturbs the current tour with
    `move` and keeps any perturbation whose cost increase is at most
    `threshold`.  Whenever an outer iteration accepts at least one move,
    the threshold shrinks by the fraction `alpha`, so progressively fewer
    worsening moves are tolerated.  The search stops after `max_iterations`
    consecutive outer iterations without improving the best tour found.

    Unlike simulated annealing, which may accept an arbitrarily bad move
    with small probability, threshold accepting never keeps a move worse
    than the current threshold.

    Parameters
    ----------
    G : Graph
        A complete weighted graph (every pair of distinct nodes adjacent).

    init_cycle : list or "greedy"
        Starting tour: a cycle visiting every node of `G` and returning to
        its first node, or the string ``"greedy"`` to construct one with
        :func:`greedy_tsp`.

    weight : string, optional (default="weight")
        Edge data key for edge weights; missing attributes count as 1.

    source : node, optional (default: first node in list(G))
        First node of the returned cycle.  Must equal ``init_cycle[0]``
        when an explicit cycle is supplied.

    threshold : int, optional (default=1)
        Initial acceptance threshold.

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Neighbor-generation move: ``"1-1"`` transposes two nodes
        (:func:`swap_two_nodes`); ``"1-0"`` relocates one node
        (:func:`move_one_node`); or any callable taking ``(cycle, seed)``
        and returning a new cycle over all nodes with equal first and
        last element.

    max_iterations : int, optional (default=10)
        Consecutive non-improving outer iterations allowed before stopping.

    N_inner : int, optional (default=100)
        Number of perturbations attempted per outer iteration.

    alpha : float between (0, 1), optional (default=0.1)
        Fractional threshold decrease applied after any outer iteration
        that accepted at least one move; otherwise the threshold is kept.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        The best tour found, as a closed walk starting and ending at
        `source`.

    Raises
    ------
    NetworkXError
        If `G` is not complete, or `init_cycle` is not a valid cycle over
        all nodes of `G` starting at `source`.

    Notes
    -----
    For $m$ outer and $n$ inner iterations the running time is
    $O(m * n * |V|)$.  For background see
    https://doi.org/10.1016/0021-9991(90)90201-B

    See Also
    --------
    simulated_annealing_tsp

    """
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node

    if init_cycle == "greedy":
        # Greedy seed tour; greedy_tsp also validates completeness of G.
        cycle = greedy_tsp(G, weight=weight, source=source)
        if G.number_of_nodes() == 2:
            return cycle
    else:
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle is not all and only nodes.")

        # Completeness check; self-loops are deliberately ignored.
        full_degree = len(G) - 1
        if any(len(nbrs) - (n in nbrs) != full_degree for n, nbrs in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        if G.number_of_nodes() == 2:
            return [source, next(G.neighbors(source)), source]

    def _tour_cost(path):
        # Total weight of a closed walk; absent weights default to 1.
        return sum(G[u][v].get(weight, 1) for u, v in pairwise(path))

    cost = _tour_cost(cycle)
    stagnant = 0
    best_cycle = cycle.copy()
    best_cost = cost
    while stagnant <= max_iterations:
        stagnant += 1
        accepted_any = False
        for _ in range(N_inner):
            candidate = move(cycle, seed)
            candidate_cost = _tour_cost(candidate)
            if candidate_cost - cost <= threshold:
                accepted_any = True
                cycle, cost = candidate, candidate_cost
                if cost < best_cost:
                    stagnant = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
        if accepted_any:
            # Tighten the threshold only when the inner loop moved at all.
            threshold -= threshold * alpha

    return best_cycle
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py ADDED
@@ -0,0 +1,252 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing treewidth decomposition.
2
+
3
+ Treewidth of an undirected graph is a number associated with the graph.
4
+ It can be defined as the size of the largest vertex set (bag) in a tree
5
+ decomposition of the graph minus one.
6
+
7
+ `Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
8
+
9
+ The notions of treewidth and tree decomposition have gained their
10
+ attractiveness partly because many graph and network problems that are
11
+ intractable (e.g., NP-hard) on arbitrary graphs become efficiently
12
+ solvable (e.g., with a linear time algorithm) when the treewidth of the
13
+ input graphs is bounded by a constant [1]_ [2]_.
14
+
15
+ There are two different functions for computing a tree decomposition:
16
+ :func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
17
+
18
+ .. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
19
+ computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275.
20
+ http://dx.doi.org/10.1016/j.ic.2009.03.008
21
+
22
+ .. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
23
+ and Computing Sciences, Utrecht University.
24
+ Technical Report UU-CS-2005-018.
25
+ http://www.cs.uu.nl
26
+
27
+ .. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
28
+ https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
29
+
30
+ """
31
+
32
+ import itertools
33
+ import sys
34
+ from heapq import heapify, heappop, heappush
35
+
36
+ import networkx as nx
37
+ from networkx.utils import not_implemented_for
38
+
39
+ __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
40
+
41
+
42
+ @not_implemented_for("directed")
43
+ @not_implemented_for("multigraph")
44
+ @nx._dispatchable(returns_graph=True)
45
+ def treewidth_min_degree(G):
46
+ """Returns a treewidth decomposition using the Minimum Degree heuristic.
47
+
48
+ The heuristic chooses the nodes according to their degree, i.e., first
49
+ the node with the lowest degree is chosen, then the graph is updated
50
+ and the corresponding node is removed. Next, a new node with the lowest
51
+ degree is chosen, and so on.
52
+
53
+ Parameters
54
+ ----------
55
+ G : NetworkX graph
56
+
57
+ Returns
58
+ -------
59
+ Treewidth decomposition : (int, Graph) tuple
60
+ 2-tuple with treewidth and the corresponding decomposed tree.
61
+ """
62
+ deg_heuristic = MinDegreeHeuristic(G)
63
+ return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
64
+
65
+
66
+ @not_implemented_for("directed")
67
+ @not_implemented_for("multigraph")
68
+ @nx._dispatchable(returns_graph=True)
69
+ def treewidth_min_fill_in(G):
70
+ """Returns a treewidth decomposition using the Minimum Fill-in heuristic.
71
+
72
+ The heuristic chooses a node from the graph, where the number of edges
73
+ added turning the neighborhood of the chosen node into clique is as
74
+ small as possible.
75
+
76
+ Parameters
77
+ ----------
78
+ G : NetworkX graph
79
+
80
+ Returns
81
+ -------
82
+ Treewidth decomposition : (int, Graph) tuple
83
+ 2-tuple with treewidth and the corresponding decomposed tree.
84
+ """
85
+ return treewidth_decomp(G, min_fill_in_heuristic)
86
+
87
+
88
class MinDegreeHeuristic:
    """Minimum Degree elimination-order heuristic.

    Repeatedly selects a node of minimum degree (number of neighbors) from
    the shrinking working graph. A lazy heap of ``(degree, tie_breaker,
    node)`` entries is maintained: stale entries are skipped on pop, and
    only the neighbors of the last eliminated node are re-pushed before the
    next selection.
    """

    def __init__(self, graph):
        self._graph = graph

        # neighbors of the last returned node; their degrees changed and
        # their heap entries must be refreshed on the next call
        self._update_nodes = []

        self.count = itertools.count()

        # heap of (degree, unique tie-breaker, node) built from the
        # initial degrees
        self._degreeq = [(len(graph[n]), next(self.count), n) for n in graph]
        heapify(self._degreeq)

    def best_node(self, graph):
        # refresh heap entries for nodes whose degree changed last round
        for node in self._update_nodes:
            heappush(self._degreeq, (len(graph[node]), next(self.count), node))

        # pop until a non-stale minimum-degree entry is found
        while self._degreeq:
            degree, _, node = heappop(self._degreeq)
            if node not in graph or len(graph[node]) != degree:
                # entry is outdated (node removed or degree changed)
                continue
            if degree == len(graph) - 1:
                # remaining graph is fully connected: abort condition
                return None

            # eliminate this node; its neighbors need a heap refresh next time
            self._update_nodes = graph[node]
            return node

        # heap exhausted: nothing left to eliminate
        return None
133
+
134
+
135
def min_fill_in_heuristic(graph):
    """Implements the Minimum Fill-in heuristic.

    Returns the node from the graph, where the number of edges added when
    turning the neighborhood of the chosen node into a clique is as small as
    possible. The running time of the algorithm is :math:`O(V^3)` and it uses
    additional constant memory.

    Parameters
    ----------
    graph : dict of sets
        Working graph mapping each node to the set of its neighbors.

    Returns
    -------
    node or None
        Node with minimum fill-in, or None if the graph is empty or already
        complete (the elimination-game abort condition).

    Notes
    -----
    The original docstring claimed this implements the "Minimum Degree"
    heuristic, which was incorrect; this is the Minimum Fill-In heuristic.
    """

    if len(graph) == 0:
        return None

    min_fill_in_node = None

    min_fill_in = sys.maxsize

    # sort nodes by degree; low-degree nodes tend to have low fill-in, which
    # makes the early-exit checks below trigger sooner
    nodes_by_degree = sorted(graph, key=lambda x: len(graph[x]))
    min_degree = len(graph[nodes_by_degree[0]])

    # abort condition (handle complete graph)
    if min_degree == len(graph) - 1:
        return None

    for node in nodes_by_degree:
        num_fill_in = 0
        nbrs = graph[node]
        for nbr in nbrs:
            # count how many nodes in nbrs current nbr is not connected to
            # subtract 1 for the node itself
            num_fill_in += len(nbrs - graph[nbr]) - 1
            if num_fill_in >= 2 * min_fill_in:
                # cannot beat the best found so far; stop counting
                break

        # each missing edge was counted from both endpoints, so the total is
        # always even; integer division keeps the count an int
        num_fill_in //= 2

        if num_fill_in < min_fill_in:  # update min-fill-in node
            if num_fill_in == 0:
                # neighborhood is already a clique: cannot do better
                return node
            min_fill_in = num_fill_in
            min_fill_in_node = node

    return min_fill_in_node
178
+
179
+
180
@nx._dispatchable(returns_graph=True)
def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
    """Returns a treewidth decomposition using the passed heuristic.

    Runs an elimination game: `heuristic` repeatedly picks the next node to
    eliminate from a working dict-of-sets graph; the node's neighborhood is
    turned into a clique and the node removed. The recorded
    (node, neighbors) pairs are then unwound in reverse order into the bags
    of a tree decomposition.

    Parameters
    ----------
    G : NetworkX graph
    heuristic : heuristic function
        Called with the current working graph (dict mapping node -> set of
        neighbors); returns the next node to eliminate, or None to stop.

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """

    # make dict-of-sets structure; this is a copy, so G itself is never
    # mutated, and self-loops are dropped
    graph = {n: set(G[n]) - {n} for n in G}

    # stack containing nodes and neighbors in the order from the heuristic
    node_stack = []

    # get first node from heuristic
    elim_node = heuristic(graph)
    while elim_node is not None:
        # connect all neighbors with each other (make the neighborhood a clique)
        nbrs = graph[elim_node]
        for u, v in itertools.permutations(nbrs, 2):
            if v not in graph[u]:
                graph[u].add(v)

        # push node and its current neighbors on stack
        node_stack.append((elim_node, nbrs))

        # remove node from graph
        for u in graph[elim_node]:
            graph[u].remove(elim_node)

        del graph[elim_node]
        elim_node = heuristic(graph)

    # the abort condition is met; put all remaining nodes into one bag
    decomp = nx.Graph()
    first_bag = frozenset(graph.keys())
    decomp.add_node(first_bag)

    treewidth = len(first_bag) - 1

    # unwind the elimination stack: each eliminated node becomes a bag
    # together with its (clique) neighborhood at elimination time
    while node_stack:
        # get node and its neighbors from the stack
        (curr_node, nbrs) = node_stack.pop()

        # find a bag all neighbors are in
        old_bag = None
        for bag in decomp.nodes:
            if nbrs <= bag:
                old_bag = bag
                break

        if old_bag is None:
            # no old_bag was found: just connect to the first_bag
            old_bag = first_bag

        # create new node for decomposition
        nbrs.add(curr_node)
        new_bag = frozenset(nbrs)

        # update treewidth (largest bag size minus one)
        treewidth = max(treewidth, len(new_bag) - 1)

        # add edge to decomposition (implicitly also adds the new node)
        decomp.add_edge(old_bag, new_bag)

    return treewidth, decomp
venv/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing an approximate minimum weight vertex cover.
2
+
3
+ A |vertex cover|_ is a subset of nodes such that each edge in the graph
4
+ is incident to at least one node in the subset.
5
+
6
+ .. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
7
+ .. |vertex cover| replace:: *vertex cover*
8
+
9
+ """
10
+ import networkx as nx
11
+
12
+ __all__ = ["min_weighted_vertex_cover"]
13
+
14
+
15
@nx._dispatchable(node_attrs="weight")
def min_weighted_vertex_cover(G, weight=None):
    r"""Returns an approximate minimum weighted vertex cover.

    The returned set of nodes is guaranteed to be a vertex cover whose
    total weight is at most twice that of a minimum weight vertex cover:

    .. math::

       w(S) \leq 2 * w(S^*),

    where $S$ is the cover returned here, $S^*$ is a minimum weight
    vertex cover, and $w$ sums the node weights of a set.

    Parameters
    ----------
    G : NetworkX graph

    weight : string, optional (default = None)
        If None, every node has weight 1. If a string, use this node
        attribute as the node weight. A node without this attribute is
        assumed to have weight 1.

    Returns
    -------
    min_weighted_cover : set
        Set of nodes whose weight sum is no more than twice the weight
        sum of the minimum weight vertex cover.

    Notes
    -----
    For a directed graph the definition is unchanged: every edge must have
    at least one endpoint in the cover; edge direction is ignored.

    This is the local-ratio algorithm: it greedily reduces residual node
    costs along the edges, adding a node to the cover as soon as its
    residual cost reaches zero. Worst-case runtime is $O(m \log n)$ for
    $n$ nodes and $m$ edges.

    References
    ----------
    .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
       approximating the weighted vertex cover problem."
       *Annals of Discrete Mathematics*, 25, 27–46
       <http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>

    """
    # residual cost of each node; defaults to 1 when unweighted
    cost = dict(G.nodes(data=weight, default=1))
    cover = set()
    for u, v in G.edges():
        # skip edges already covered by an earlier choice
        if u not in cover and v not in cover:
            # the cheaper endpoint joins the cover (ties go to u, matching
            # the <= comparison); its cost is charged against the other
            if cost[u] <= cost[v]:
                cheaper, other = u, v
            else:
                cheaper, other = v, u
            cover.add(cheaper)
            cost[other] -= cost[cheaper]
    return cover
venv/lib/python3.10/site-packages/networkx/algorithms/boundary.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines to find the boundary of a set of nodes.
2
+
3
+ An edge boundary is a set of edges, each of which has exactly one
4
+ endpoint in a given set of nodes (or, in the case of directed graphs,
5
+ the set of edges whose source node is in the set).
6
+
7
+ A node boundary of a set *S* of nodes is the set of (out-)neighbors of
8
+ nodes in *S* that are outside *S*.
9
+
10
+ """
11
+ from itertools import chain
12
+
13
+ import networkx as nx
14
+
15
+ __all__ = ["edge_boundary", "node_boundary"]
16
+
17
+
18
@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data")
def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
    """Returns the edge boundary of `nbunch1`.

    The *edge boundary* of a set *S* with respect to a set *T* is the
    set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*.
    If *T* is not specified, it is assumed to be the set of all nodes
    not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Iterable of nodes in the graph whose edge boundary is returned
        (the set *S* above).

    nbunch2 : iterable
        Iterable of nodes forming the target ("exterior") set *T*.
        Defaults to all nodes of `G` not in `nbunch1`.

    keys : bool
        Same meaning as in :meth:`MultiGraph.edges`.

    data : bool or object
        Same meaning as in :meth:`MultiGraph.edges`.

    default : object
        Same meaning as in :meth:`MultiGraph.edges`.

    Returns
    -------
    iterator
        An iterator over the edges in the boundary of `nbunch1` with
        respect to `nbunch2`. If `keys`, `data`, or `default`
        are specified and `G` is a multigraph, then edges are returned
        with keys and/or data, as in :meth:`MultiGraph.edges`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.edge_boundary(G, (1, 3)))
    [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]

    When nbunch2 is given:

    >>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
    [(1, 0), (1, 2), (3, 0), (3, 2)]

    Notes
    -----
    Elements of `nbunch` not present in `G` are ignored. `nbunch1` and
    `nbunch2` are usually meant to be disjoint, but that is not required.
    """
    nset1 = {n for n in nbunch1 if n in G}
    # Edges incident to nset1. `Graph.edges()` gives no guarantee on edge
    # orientation, so the predicates below must accept either (u, v) or
    # (v, u) for a boundary edge.
    if G.is_multigraph():
        edges = G.edges(nset1, data=data, keys=keys, default=default)
    else:
        edges = G.edges(nset1, data=data, default=default)
    if nbunch2 is None:
        # nbunch2 defaults to the complement of nbunch1; test membership
        # with `in nset1` instead of materializing the complement.
        def crosses(e):
            return (e[0] in nset1) != (e[1] in nset1)

        return filter(crosses, edges)

    nset2 = set(nbunch2)

    def spans(e):
        u, v = e[0], e[1]
        return (u in nset1 and v in nset2) or (v in nset1 and u in nset2)

    return filter(spans, edges)
107
+
108
+
109
@nx._dispatchable
def node_boundary(G, nbunch1, nbunch2=None):
    """Returns the node boundary of `nbunch1`.

    The *node boundary* of a set *S* with respect to a set *T* is the
    set of nodes *v* in *T* such that for some *u* in *S*, there is an
    edge joining *u* to *v*. If *T* is not specified, it is assumed to
    be the set of all nodes not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Iterable of nodes in the graph whose node boundary is returned
        (the set *S* above).

    nbunch2 : iterable
        Iterable of nodes forming the target ("exterior") set *T*.
        Defaults to all nodes of `G` not in `nbunch1`.

    Returns
    -------
    set
        The node boundary of `nbunch1` with respect to `nbunch2`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.node_boundary(G, (3, 4)))
    [0, 2, 5]

    When nbunch2 is given:

    >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
    [0, 5]

    Notes
    -----
    Elements of `nbunch` not present in `G` are ignored. `nbunch1` and
    `nbunch2` are usually meant to be disjoint, but that is not required.
    """
    nset1 = {n for n in nbunch1 if n in G}
    # collect all (out-)neighbors of nset1, then drop nset1 itself
    bdy = set()
    for node in nset1:
        bdy.update(G[node])
    bdy -= nset1
    # restrict to nbunch2 when given; otherwise the complement is implicit
    if nbunch2 is not None:
        bdy &= set(nbunch2)
    return bdy
venv/lib/python3.10/site-packages/networkx/algorithms/bridges.py ADDED
@@ -0,0 +1,205 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bridge-finding algorithms."""
2
+ from itertools import chain
3
+
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = ["bridges", "has_bridges", "local_bridges"]
8
+
9
+
10
@not_implemented_for("directed")
@nx._dispatchable
def bridges(G, root=None):
    """Generate all bridges in a graph.

    A *bridge* in a graph is an edge whose removal causes the number of
    connected components of the graph to increase. Equivalently, a bridge is an
    edge that does not belong to any cycle. Bridges are also known as cut-edges,
    isthmuses, or cut arcs.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
        A node in the graph `G`. If specified, only the bridges in the
        connected component containing this node will be returned.

    Yields
    ------
    e : edge
        An edge in the graph whose removal disconnects the graph (or
        causes the number of connected components to increase).

    Raises
    ------
    NodeNotFound
        If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge:

    >>> G = nx.barbell_graph(10, 0)
    >>> list(nx.bridges(G))
    [(9, 10)]

    Notes
    -----
    This is an implementation of the algorithm described in [1]_. An edge is a
    bridge if and only if it is not contained in any chain. Chains are found
    using the :func:`networkx.chain_decomposition` function.

    The algorithm described in [1]_ requires a simple graph. If the provided
    graph is a multigraph, we convert it to a simple graph and verify that any
    bridges discovered by the chain decomposition algorithm are not multi-edges.

    Ignoring polylogarithmic factors, the worst-case time complexity is the
    same as the :func:`networkx.chain_decomposition` function,
    $O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is
    the number of edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
    """
    multigraph = G.is_multigraph()
    # chain decomposition needs a simple graph
    H = nx.Graph(G) if multigraph else G
    chains = nx.chain_decomposition(H, root=root)
    # an edge is a bridge iff it appears in no chain
    chain_edges = set(chain.from_iterable(chains))
    # (the previous revision made an unused copy of H here; removed)
    if root is not None:
        # restrict the scan to the component containing `root`
        H = H.subgraph(nx.node_connected_component(H, root)).copy()
    for u, v in H.edges():
        # chain edges may be stored in either orientation
        if (u, v) not in chain_edges and (v, u) not in chain_edges:
            # a multi-edge is never a bridge: a parallel edge keeps the
            # endpoints connected after removal
            if multigraph and len(G[u][v]) > 1:
                continue
            yield u, v
81
+
82
+
83
@not_implemented_for("directed")
@nx._dispatchable
def has_bridges(G, root=None):
    """Decide whether a graph has any bridges.

    A *bridge* in a graph is an edge whose removal causes the number of
    connected components of the graph to increase.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
        A node in the graph `G`. If specified, only the bridges in the
        connected component containing this node will be considered.

    Returns
    -------
    bool
        Whether the graph (or the connected component containing `root`)
        has any bridges.

    Raises
    ------
    NodeNotFound
        If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge::

        >>> G = nx.barbell_graph(10, 0)
        >>> nx.has_bridges(G)
        True

    On the other hand, the cycle graph has no bridges::

        >>> G = nx.cycle_graph(5)
        >>> nx.has_bridges(G)
        False

    Notes
    -----
    This implementation delegates to :func:`networkx.bridges` and shares
    its worst-case time complexity, $O(m + n)$, ignoring polylogarithmic
    factors, for $n$ nodes and $m$ edges.
    """
    # Pull at most one element from the bridges generator; the presence of
    # any bridge decides the answer.
    for _ in bridges(G, root=root):
        return True
    return False
141
+
142
+
143
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def local_bridges(G, with_span=True, weight=None):
    """Iterate over local bridges of `G` optionally computing the span

    A *local bridge* is an edge whose endpoints have no common neighbors.
    That is, the edge is not part of a triangle in the graph.

    The *span* of a *local bridge* is the shortest path length between
    the endpoints if the local bridge is removed.

    Parameters
    ----------
    G : undirected graph

    with_span : bool
        If True, yield a 3-tuple `(u, v, span)`

    weight : function, string or None (default: None)
        If function, used to compute edge weights for the span.
        If string, the edge data attribute used in calculating span.
        If None, all edges have weight 1.

    Yields
    ------
    e : edge
        The local bridges as an edge 2-tuple of nodes `(u, v)` or
        as a 3-tuple `(u, v, span)` when `with_span is True`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph or multigraph.

    Examples
    --------
    A cycle graph has every edge a local bridge with span N-1.

    >>> G = nx.cycle_graph(9)
    >>> (0, 8, 8) in set(nx.local_bridges(G))
    True
    """
    if with_span is not True:
        # fast path: only the no-common-neighbor (no-triangle) test is needed
        for u, v in G.edges:
            if not (set(G[u]) & set(G[v])):
                yield u, v
    else:
        wt = nx.weighted._weight_function(G, weight)
        for u, v in G.edges:
            if not (set(G[u]) & set(G[v])):
                enodes = {u, v}

                # Weight callback that hides the bridge itself: returning
                # None makes shortest_path_length ignore the (u, v) edge,
                # so the computed distance is the span. The closure reads
                # `enodes`, which is rebound each iteration before use.
                def hide_edge(n, nbr, d):
                    if n not in enodes or nbr not in enodes:
                        return wt(n, nbr, d)
                    return None

                try:
                    span = nx.shortest_path_length(G, u, v, weight=hide_edge)
                    yield u, v, span
                except nx.NetworkXNoPath:
                    # endpoints disconnect entirely without this edge
                    yield u, v, float("inf")
venv/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines to calculate the broadcast time of certain graphs.
2
+
3
+ Broadcasting is an information dissemination problem in which a node in a graph,
4
+ called the originator, must distribute a message to all other nodes by placing
5
+ a series of calls along the edges of the graph. Once informed, other nodes aid
6
+ the originator in distributing the message.
7
+
8
+ The broadcasting must be completed as quickly as possible subject to the
9
+ following constraints:
10
+ - Each call requires one unit of time.
11
+ - A node can only participate in one call per unit of time.
12
+ - Each call only involves two adjacent nodes: a sender and a receiver.
13
+ """
14
+
15
+ import networkx as nx
16
+ from networkx import NetworkXError
17
+ from networkx.utils import not_implemented_for
18
+
19
+ __all__ = [
20
+ "tree_broadcast_center",
21
+ "tree_broadcast_time",
22
+ ]
23
+
24
+
25
+ def _get_max_broadcast_value(G, U, v, values):
26
+ adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True)
27
+ return max(values[u] + i for i, u in enumerate(adj, start=1))
28
+
29
+
30
+ def _get_broadcast_centers(G, v, values, target):
31
+ adj = sorted(G.neighbors(v), key=values.get, reverse=True)
32
+ j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target)
33
+ return set([v] + adj[:j])
34
+
35
+
36
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_center(G):
    """Return the Broadcast Center of the tree `G`.

    The broadcast center of a graph G denotes the set of nodes having
    minimum broadcast time [1]_. This is a linear algorithm for determining
    the broadcast center of a tree with ``N`` nodes, as a by-product it also
    determines the broadcast time from the broadcast center.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree

    Returns
    -------
    BC : (int, set) tuple
        minimum broadcast number of the tree, set of broadcast centers

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.
    NetworkXError
        If the graph is not a tree.

    References
    ----------
    .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T,
        Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981)
    """
    # Reject non-tree input: the algorithm below is only correct for trees.
    # (Bug fix: the exception was previously constructed but never raised,
    # so non-tree graphs were silently accepted.)
    if not nx.is_tree(G):
        raise NetworkXError("Input graph is not a tree")
    # step 0: trivial trees
    if G.number_of_nodes() == 2:
        return 1, set(G.nodes())
    if G.number_of_nodes() == 1:
        return 0, set(G.nodes())

    # step 1: strip the leaves; they have broadcast value 0
    U = {node for node, deg in G.degree if deg == 1}
    values = {n: 0 for n in U}
    T = G.copy()
    T.remove_nodes_from(U)

    # step 2: new leaves of T get the number of stripped neighbors as value
    W = {node for node, deg in T.degree if deg == 1}
    values.update((w, G.degree[w] - 1) for w in W)

    # step 3: peel T inward, propagating broadcast values
    while T.number_of_nodes() >= 2:
        # step 4: take the leaf with the smallest value and its neighbor
        w = min(W, key=lambda n: values[n])
        v = next(T.neighbors(w))

        # step 5: move w into the processed set
        U.add(w)
        W.remove(w)
        T.remove_node(w)

        # step 6: when v becomes a leaf of T, its value is determined
        if T.degree(v) == 1:
            # update t(v)
            values.update({v: _get_max_broadcast_value(G, U, v, values)})
            W.add(v)

    # step 7: the last remaining node fixes the broadcast number
    v = nx.utils.arbitrary_element(T)
    b_T = _get_max_broadcast_value(G, U, v, values)
    return b_T, _get_broadcast_centers(G, v, values, b_T)
107
+
108
+
109
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_time(G, node=None):
    """Return the Broadcast Time of the tree `G`.

    The minimum broadcast time of a node is the minimum amount of time
    required to complete broadcasting starting from that originator; the
    broadcast time of a graph is the maximum of this quantity over all
    nodes [1]_. This function returns the minimum broadcast time of
    `node`, or the broadcast time of the whole tree when `node` is None.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree
    node: int, optional
        index of starting node. If `None`, the algorithm returns the broadcast
        time of the tree.

    Returns
    -------
    BT : int
        Broadcast Time of a node in a tree

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Harutyunyan, H. A. and Li, Z.
        "A Simple Construction of Broadcast Graphs."
        In Computing and Combinatorics. COCOON 2019
        (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
    """
    b_T, b_C = tree_broadcast_center(G)
    if node is not None:
        # broadcast time of one node = center time + distance to the center
        return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C)
    # graph broadcast time: center time + eccentricity of the center set
    dist_from_center = dict.fromkeys(G, len(G))
    for center in b_C:
        for target, dist in nx.shortest_path_length(G, center).items():
            dist_from_center[target] = min(dist_from_center[target], dist)
    return b_T + max(dist_from_center.values())
venv/lib/python3.10/site-packages/networkx/algorithms/chordal.py ADDED
@@ -0,0 +1,442 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for chordal graphs.
3
+
4
+ A graph is chordal if every cycle of length at least 4 has a chord
5
+ (an edge joining two nodes not adjacent in the cycle).
6
+ https://en.wikipedia.org/wiki/Chordal_graph
7
+ """
8
+ import sys
9
+
10
+ import networkx as nx
11
+ from networkx.algorithms.components import connected_components
12
+ from networkx.utils import arbitrary_element, not_implemented_for
13
+
14
+ __all__ = [
15
+ "is_chordal",
16
+ "find_induced_nodes",
17
+ "chordal_graph_cliques",
18
+ "chordal_graph_treewidth",
19
+ "NetworkXTreewidthBoundExceeded",
20
+ "complete_to_chordal_graph",
21
+ ]
22
+
23
+
24
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Exception raised when a treewidth bound has been provided and it has
    been exceeded"""
27
+
28
+
29
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_chordal(G):
    """Checks whether G is a chordal graph.

    A graph is chordal if every cycle of length at least 4 has a chord
    (an edge joining two nodes not adjacent in the cycle).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Returns
    -------
    chordal : bool
        True if G is a chordal graph and False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ... ]
    >>> G = nx.Graph(e)
    >>> nx.is_chordal(G)
    True

    Notes
    -----
    The routine walks the nodes in maximum cardinality search order and
    reports False as soon as some node's separator fails to be a clique.
    Based on the algorithms in [1]_. Self loops are ignored.

    References
    ----------
    .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
       to test chordality of graphs, test acyclicity of hypergraphs, and
       selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
       pp. 566–579.
    """
    # graphs on at most 3 nodes cannot contain a chordless 4-cycle
    if len(G.nodes) <= 3:
        return True
    # chordal iff the MCS sweep finds no chordality-breaking triple
    return not _find_chordality_breaker(G)
89
+
90
+
91
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
        A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy: adding the edge (s, t) may create chordless cycles
    # whose nodes are exactly the induced nodes being collected.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        # Shortcut each breaker node through s so subsequent searches
        # make progress toward restoring chordality.
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
169
+
170
+
171
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Process each connected component independently.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # A single node is a (trivial) maximal clique unless it has a
            # self loop, which makes the graph non-chordal by convention here.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search: number nodes one at a time,
            # always picking the unnumbered node with the most numbered
            # neighbors. For chordal graphs the numbered neighbors of each
            # newly numbered node must form a clique.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    new_clique_wanna_be.add(v)
                    # Emit the previous candidate only when it is maximal,
                    # i.e. not contained in the new candidate clique.
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    # Numbered neighbors are not a clique: G is not chordal.
                    raise nx.NetworkXError("Input graph is not chordal.")
            # The last candidate is always maximal.
            yield frozenset(clique_wanna_be)
242
+
243
+
244
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    For a chordal graph the treewidth equals the size of its largest
    maximal clique minus one.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Largest maximal-clique size; -1 mirrors the accumulator seed used
    # when the graph yields no cliques.
    largest_clique = max(
        (len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1
    )
    return largest_clique - 1
296
+
297
+
298
def _is_complete_graph(G):
    """Returns True if G is a complete graph.

    A graph with fewer than two nodes is trivially complete. Self loops
    are rejected with :exc:`NetworkXError` because the chordality helpers
    in this module assume simple graphs.
    """
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    n = G.number_of_nodes()
    if n < 2:
        return True
    # Use integer floor division: the maximum edge count n*(n-1)/2 is an
    # exact integer, and float division could lose precision for huge n.
    return G.number_of_edges() == n * (n - 1) // 2
308
+
309
+
310
+ def _find_missing_edge(G):
311
+ """Given a non-complete graph G, returns a missing edge."""
312
+ nodes = set(G)
313
+ for u in G:
314
+ missing = nodes - set(list(G[u].keys()) + [u])
315
+ if missing:
316
+ return (u, missing.pop())
317
+
318
+
319
+ def _max_cardinality_node(G, choices, wanna_connect):
320
+ """Returns a the node in choices that has more connections in G
321
+ to nodes in wanna_connect.
322
+ """
323
+ max_number = -1
324
+ for x in choices:
325
+ number = len([y for y in G[x] if y in wanna_connect])
326
+ if number > max_number:
327
+ max_number = number
328
+ max_cardinality_node = x
329
+ return max_cardinality_node
330
+
331
+
332
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    Returns an empty tuple when the search completes without finding a
    breaker (the graph is chordal as far as this search can tell).

    Raises
    ------
    NetworkXPointlessConcept
        If `G` has no nodes.
    NetworkXTreewidthBoundExceeded
        If the running clique-size lower bound exceeds `treewidth_bound`.

    It ignores any self loops.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    current_treewidth = -1
    # Maximum cardinality search; the treewidth bound is enforced inside
    # the loop (via the raise below) rather than in the loop condition.
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        # In a chordal graph the already-numbered neighbors of v must
        # induce a clique.
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
369
+
370
+
371
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
        The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
       Maximum Cardinality Search for Computing Minimal Triangulations of
       Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    alpha = {node: 0 for node in H}
    # Already chordal: nothing to add; every node keeps ordering label 0.
    if nx.is_chordal(H):
        return H, alpha
    chords = set()
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # MCS-M: number nodes from n down to 1, always picking the unnumbered
    # node with maximum weight.
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # A path from y to z through strictly lower-weight nodes
                # forces the fill-in edge (chord) (z, y).
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    # Add all fill-in edges at once to obtain the chordal completion.
    H.add_edges_from(chords)
    return H, alpha
venv/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Communicability.
3
+ """
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = ["communicability", "communicability_exp"]
8
+
9
+
10
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability(G):
    r"""Returns communicability between all pairs of nodes in G.

    The communicability between pairs of nodes in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    communicability_exp:
       Communicability between all pairs of nodes in G using spectral
       decomposition.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses a spectral decomposition of the adjacency matrix.
    Let G=(V,E) be a simple undirected graph. Using the connection between
    the powers of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes `u` and `v` based on the graph spectrum
    is [1]_

    .. math::
        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},

    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
    eigenvector of the adjacency matrix associated with the eigenvalue
    `\lambda_{j}`.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability(G)
    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    w, vec = np.linalg.eigh(A)
    expw = np.exp(w)
    # C = Phi @ diag(e^lambda) @ Phi.T, computed as one BLAS matrix product
    # instead of an O(n^3) pure-Python triple loop over node pairs and
    # eigenvector components. The math is identical to summing
    # phi_j(u) * phi_j(v) * e^{lambda_j} over j.
    C = (vec * expw) @ vec.T
    mapping = dict(zip(nodelist, range(len(nodelist))))
    # Repackage the dense matrix into the documented dict-of-dicts form.
    return {u: {v: float(C[mapping[u], mapping[v]]) for v in G} for u in G}
90
+
91
+
92
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_exp(G):
    r"""Returns communicability between all pairs of nodes in G.

    Communicability between pair of node (u,v) of node in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    communicability:
       Communicability between pairs of nodes in G.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses matrix exponentiation of the adjacency matrix.

    Let G=(V,E) be a simple undirected graph. Using the connection between
    the powers of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes u and v is [1]_,

    .. math::
        C(u,v) = (e^A)_{uv},

    where `A` is the adjacency matrix of G.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability_exp(G)
    """
    import scipy as sp

    nodelist = list(G)  # fixes the row/column order of the matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    # The communicability matrix is the matrix exponential of A.
    expA = sp.linalg.expm(A)
    index = {node: i for i, node in enumerate(nodelist)}
    # Repackage the dense matrix as a dict-of-dicts keyed by node pairs.
    return {u: {v: float(expA[index[u], index[v]]) for v in G} for u in G}
venv/lib/python3.10/site-packages/networkx/algorithms/covering.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Functions related to graph covers."""
2
+
3
+ from functools import partial
4
+ from itertools import chain
5
+
6
+ import networkx as nx
7
+ from networkx.utils import arbitrary_element, not_implemented_for
8
+
9
+ __all__ = ["min_edge_cover", "is_edge_cover"]
10
+
11
+
12
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def min_edge_cover(G, matching_algorithm=None):
    """Returns the min cardinality edge cover of the graph as a set of edges.

    A smallest edge cover can be found in polynomial time by finding
    a maximum matching and extending it greedily so that all nodes
    are covered. This function follows that process. A maximum matching
    algorithm can be specified for the first step of the algorithm.
    The resulting set may return a set with one 2-tuple for each edge,
    (the usual case) or with both 2-tuples `(u, v)` and `(v, u)` for
    each edge. The latter is only done when a bipartite matching algorithm
    is specified as `matching_algorithm`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching for `G`.
        The function must take one input, the graph `G`, and return
        either a set of edges (with only one direction for the pair of nodes)
        or a dictionary mapping each node to its mate. If not specified,
        :func:`~networkx.algorithms.matching.max_weight_matching` is used.
        Common bipartite matching functions include
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        or
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.

    Returns
    -------
    min_cover : set

        A set of the edges in a minimum edge cover in the form of tuples.
        It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)`
        for each edge. If a bipartite method is used to compute the matching,
        the returned set contains both the 2-tuples `(u, v)` and `(v, u)`
        for each edge of a minimum edge cover.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> sorted(nx.min_edge_cover(G))
    [(2, 1), (3, 0)]

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    The minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.

    Minimum edge cover for `G` can also be found using the `min_edge_covering`
    function in :mod:`networkx.algorithms.bipartite.covering` which is
    simply this function with a default matching algorithm of
    :func:`~networkx.algorithms.bipartite.matching.hopcraft_karp_matching`
    """
    # An empty graph is trivially covered by the empty set.
    if len(G) == 0:
        return set()
    if nx.number_of_isolates(G) > 0:
        # ``min_cover`` does not exist as there is an isolated node
        raise nx.NetworkXException(
            "Graph has a node with no edge incident on it, so no edge cover exists."
        )
    if matching_algorithm is None:
        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
    maximum_matching = matching_algorithm(G)
    # ``min_cover`` is superset of ``maximum_matching``
    try:
        # bipartite matching algs return dict so convert if needed
        # (EAFP: only dicts have .items(); sets raise AttributeError)
        min_cover = set(maximum_matching.items())
        bipartite_cover = True
    except AttributeError:
        min_cover = maximum_matching
        bipartite_cover = False
    # iterate for uncovered nodes
    uncovered_nodes = set(G) - {v for u, v in min_cover} - {u for u, v in min_cover}
    for v in uncovered_nodes:
        # Since `v` is uncovered, each edge incident to `v` will join it
        # with a covered node (otherwise, if there were an edge joining
        # uncovered nodes `u` and `v`, the maximum matching algorithm
        # would have found it), so we can choose an arbitrary edge
        # incident to `v`. (This applies only in a simple graph, not a
        # multigraph.)
        u = arbitrary_element(G[v])
        min_cover.add((u, v))
        if bipartite_cover:
            # Bipartite callers get both orientations of each edge.
            min_cover.add((v, u))
    return min_cover
+ return min_cover
106
+
107
+
108
@not_implemented_for("directed")
@nx._dispatchable
def is_edge_cover(G, cover):
    """Decides whether a set of edges is a valid edge cover of the graph.

    Given a set of edges, whether it is an edge covering can
    be decided if we just check whether all nodes of the graph
    has an edge from the set, incident on it.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    cover : set
        Set of edges to be checked.

    Returns
    -------
    bool
        Whether the set of edges is a valid edge cover of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> cover = {(2, 1), (3, 0)}
    >>> nx.is_edge_cover(G, cover)
    True

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    """
    # Collect every endpoint touched by the cover, then require that all
    # nodes of G appear among them.
    covered = set()
    for endpoints in cover:
        covered.update(endpoints)
    return all(node in covered for node in G)
venv/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py ADDED
@@ -0,0 +1,951 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Graph diameter, radius, eccentricity and other properties."""
2
+
3
+ import networkx as nx
4
+ from networkx.utils import not_implemented_for
5
+
6
+ __all__ = [
7
+ "eccentricity",
8
+ "diameter",
9
+ "radius",
10
+ "periphery",
11
+ "center",
12
+ "barycenter",
13
+ "resistance_distance",
14
+ "kemeny_constant",
15
+ "effective_graph_resistance",
16
+ ]
17
+
18
+
19
+ def _extrema_bounding(G, compute="diameter", weight=None):
20
+ """Compute requested extreme distance metric of undirected graph G
21
+
22
+ Computation is based on smart lower and upper bounds, and in practice
23
+ linear in the number of nodes, rather than quadratic (except for some
24
+ border cases such as complete graphs or circle shaped graphs).
25
+
26
+ Parameters
27
+ ----------
28
+ G : NetworkX graph
29
+ An undirected graph
30
+
31
+ compute : string denoting the requesting metric
32
+ "diameter" for the maximal eccentricity value,
33
+ "radius" for the minimal eccentricity value,
34
+ "periphery" for the set of nodes with eccentricity equal to the diameter,
35
+ "center" for the set of nodes with eccentricity equal to the radius,
36
+ "eccentricities" for the maximum distance from each node to all other nodes in G
37
+
38
+ weight : string, function, or None
39
+ If this is a string, then edge weights will be accessed via the
40
+ edge attribute with this key (that is, the weight of the edge
41
+ joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
42
+ such edge attribute exists, the weight of the edge is assumed to
43
+ be one.
44
+
45
+ If this is a function, the weight of an edge is the value
46
+ returned by the function. The function must accept exactly three
47
+ positional arguments: the two endpoints of an edge and the
48
+ dictionary of edge attributes for that edge. The function must
49
+ return a number.
50
+
51
+ If this is None, every edge has weight/distance/cost 1.
52
+
53
+ Weights stored as floating point values can lead to small round-off
54
+ errors in distances. Use integer weights to avoid this.
55
+
56
+ Weights should be positive, since they are distances.
57
+
58
+ Returns
59
+ -------
60
+ value : value of the requested metric
61
+ int for "diameter" and "radius" or
62
+ list of nodes for "center" and "periphery" or
63
+ dictionary of eccentricity values keyed by node for "eccentricities"
64
+
65
+ Raises
66
+ ------
67
+ NetworkXError
68
+ If the graph consists of multiple components
69
+ ValueError
70
+ If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities".
71
+
72
+ Notes
73
+ -----
74
+ This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_.
75
+
76
+ References
77
+ ----------
78
+ .. [1] F. W. Takes, W. A. Kosters,
79
+ "Determining the diameter of small world networks."
80
+ Proceedings of the 20th ACM international conference on Information and knowledge management, 2011
81
+ https://dl.acm.org/doi/abs/10.1145/2063576.2063748
82
+ .. [2] F. W. Takes, W. A. Kosters,
83
+ "Computing the Eccentricity Distribution of Large Graphs."
84
+ Algorithms, 2013
85
+ https://www.mdpi.com/1999-4893/6/1/100
86
+ .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes,
87
+ "Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. "
88
+ Theoretical Computer Science, 2015
89
+ https://www.sciencedirect.com/science/article/pii/S0304397515001644
90
+ """
91
+ # init variables
92
+ degrees = dict(G.degree()) # start with the highest degree node
93
+ minlowernode = max(degrees, key=degrees.get)
94
+ N = len(degrees) # number of nodes
95
+ # alternate between smallest lower and largest upper bound
96
+ high = False
97
+ # status variables
98
+ ecc_lower = dict.fromkeys(G, 0)
99
+ ecc_upper = dict.fromkeys(G, N)
100
+ candidates = set(G)
101
+
102
+ # (re)set bound extremes
103
+ minlower = N
104
+ maxlower = 0
105
+ minupper = N
106
+ maxupper = 0
107
+
108
+ # repeat the following until there are no more candidates
109
+ while candidates:
110
+ if high:
111
+ current = maxuppernode # select node with largest upper bound
112
+ else:
113
+ current = minlowernode # select node with smallest lower bound
114
+ high = not high
115
+
116
+ # get distances from/to current node and derive eccentricity
117
+ dist = nx.shortest_path_length(G, source=current, weight=weight)
118
+
119
+ if len(dist) != N:
120
+ msg = "Cannot compute metric because graph is not connected."
121
+ raise nx.NetworkXError(msg)
122
+ current_ecc = max(dist.values())
123
+
124
+ # print status update
125
+ # print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/"
126
+ # + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is "
127
+ # + str(current_ecc))
128
+ # print(ecc_upper)
129
+
130
+ # (re)set bound extremes
131
+ maxuppernode = None
132
+ minlowernode = None
133
+
134
+ # update node bounds
135
+ for i in candidates:
136
+ # update eccentricity bounds
137
+ d = dist[i]
138
+ ecc_lower[i] = low = max(ecc_lower[i], max(d, (current_ecc - d)))
139
+ ecc_upper[i] = upp = min(ecc_upper[i], current_ecc + d)
140
+
141
+ # update min/max values of lower and upper bounds
142
+ minlower = min(ecc_lower[i], minlower)
143
+ maxlower = max(ecc_lower[i], maxlower)
144
+ minupper = min(ecc_upper[i], minupper)
145
+ maxupper = max(ecc_upper[i], maxupper)
146
+
147
+ # update candidate set
148
+ if compute == "diameter":
149
+ ruled_out = {
150
+ i
151
+ for i in candidates
152
+ if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper
153
+ }
154
+ elif compute == "radius":
155
+ ruled_out = {
156
+ i
157
+ for i in candidates
158
+ if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower
159
+ }
160
+ elif compute == "periphery":
161
+ ruled_out = {
162
+ i
163
+ for i in candidates
164
+ if ecc_upper[i] < maxlower
165
+ and (maxlower == maxupper or ecc_lower[i] > maxupper)
166
+ }
167
+ elif compute == "center":
168
+ ruled_out = {
169
+ i
170
+ for i in candidates
171
+ if ecc_lower[i] > minupper
172
+ and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)
173
+ }
174
+ elif compute == "eccentricities":
175
+ ruled_out = set()
176
+ else:
177
+ msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'"
178
+ raise ValueError(msg)
179
+
180
+ ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i])
181
+ candidates -= ruled_out
182
+
183
+ # for i in ruled_out:
184
+ # print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"%
185
+ # (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper))
186
+ # print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"%
187
+ # (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper))
188
+ # print("NODE 4: %g"%(ecc_upper[4] <= maxlower))
189
+ # print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper))
190
+ # print("NODE 4: %g"%(ecc_upper[4] <= maxlower
191
+ # and 2 * ecc_lower[4] >= maxupper))
192
+
193
+ # updating maxuppernode and minlowernode for selection in next round
194
+ for i in candidates:
195
+ if (
196
+ minlowernode is None
197
+ or (
198
+ ecc_lower[i] == ecc_lower[minlowernode]
199
+ and degrees[i] > degrees[minlowernode]
200
+ )
201
+ or (ecc_lower[i] < ecc_lower[minlowernode])
202
+ ):
203
+ minlowernode = i
204
+
205
+ if (
206
+ maxuppernode is None
207
+ or (
208
+ ecc_upper[i] == ecc_upper[maxuppernode]
209
+ and degrees[i] > degrees[maxuppernode]
210
+ )
211
+ or (ecc_upper[i] > ecc_upper[maxuppernode])
212
+ ):
213
+ maxuppernode = i
214
+
215
+ # print status update
216
+ # print (" min=" + str(minlower) + "/" + str(minupper) +
217
+ # " max=" + str(maxlower) + "/" + str(maxupper) +
218
+ # " candidates: " + str(len(candidates)))
219
+ # print("cand:",candidates)
220
+ # print("ecc_l",ecc_lower)
221
+ # print("ecc_u",ecc_upper)
222
+ # wait = input("press Enter to continue")
223
+
224
+ # return the correct value of the requested metric
225
+ if compute == "diameter":
226
+ return maxlower
227
+ if compute == "radius":
228
+ return minupper
229
+ if compute == "periphery":
230
+ p = [v for v in G if ecc_lower[v] == maxlower]
231
+ return p
232
+ if compute == "center":
233
+ c = [v for v in G if ecc_upper[v] == minupper]
234
+ return c
235
+ if compute == "eccentricities":
236
+ return ecc_lower
237
+ return None
238
+
239
+
240
+ @nx._dispatchable(edge_attrs="weight")
241
+ def eccentricity(G, v=None, sp=None, weight=None):
242
+ """Returns the eccentricity of nodes in G.
243
+
244
+ The eccentricity of a node v is the maximum distance from v to
245
+ all other nodes in G.
246
+
247
+ Parameters
248
+ ----------
249
+ G : NetworkX graph
250
+ A graph
251
+
252
+ v : node, optional
253
+ Return value of specified node
254
+
255
+ sp : dict of dicts, optional
256
+ All pairs shortest path lengths as a dictionary of dictionaries
257
+
258
+ weight : string, function, or None (default=None)
259
+ If this is a string, then edge weights will be accessed via the
260
+ edge attribute with this key (that is, the weight of the edge
261
+ joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
262
+ such edge attribute exists, the weight of the edge is assumed to
263
+ be one.
264
+
265
+ If this is a function, the weight of an edge is the value
266
+ returned by the function. The function must accept exactly three
267
+ positional arguments: the two endpoints of an edge and the
268
+ dictionary of edge attributes for that edge. The function must
269
+ return a number.
270
+
271
+ If this is None, every edge has weight/distance/cost 1.
272
+
273
+ Weights stored as floating point values can lead to small round-off
274
+ errors in distances. Use integer weights to avoid this.
275
+
276
+ Weights should be positive, since they are distances.
277
+
278
+ Returns
279
+ -------
280
+ ecc : dictionary
281
+ A dictionary of eccentricity values keyed by node.
282
+
283
+ Examples
284
+ --------
285
+ >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
286
+ >>> dict(nx.eccentricity(G))
287
+ {1: 2, 2: 3, 3: 2, 4: 2, 5: 3}
288
+
289
+ >>> dict(nx.eccentricity(G, v=[1, 5])) # This returns the eccentricity of node 1 & 5
290
+ {1: 2, 5: 3}
291
+
292
+ """
293
+ # if v is None: # none, use entire graph
294
+ # nodes=G.nodes()
295
+ # elif v in G: # is v a single node
296
+ # nodes=[v]
297
+ # else: # assume v is a container of nodes
298
+ # nodes=v
299
+ order = G.order()
300
+ e = {}
301
+ for n in G.nbunch_iter(v):
302
+ if sp is None:
303
+ length = nx.shortest_path_length(G, source=n, weight=weight)
304
+
305
+ L = len(length)
306
+ else:
307
+ try:
308
+ length = sp[n]
309
+ L = len(length)
310
+ except TypeError as err:
311
+ raise nx.NetworkXError('Format of "sp" is invalid.') from err
312
+ if L != order:
313
+ if G.is_directed():
314
+ msg = (
315
+ "Found infinite path length because the digraph is not"
316
+ " strongly connected"
317
+ )
318
+ else:
319
+ msg = "Found infinite path length because the graph is not" " connected"
320
+ raise nx.NetworkXError(msg)
321
+
322
+ e[n] = max(length.values())
323
+
324
+ if v in G:
325
+ return e[v] # return single value
326
+ return e
327
+
328
+
329
@nx._dispatchable(edge_attrs="weight")
def diameter(G, e=None, usebounds=False, weight=None):
    """Returns the diameter of the graph G.

    The diameter is the maximum eccentricity over all nodes.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional
        If True (and ``e`` is None and `G` is undirected), use a faster
        bound-based algorithm instead of computing every eccentricity.

    weight : string, function, or None
        If a string, edge weights are read from that edge attribute
        (missing attributes count as 1). If a function, it is called with
        the two endpoints and the edge-attribute dict and must return a
        number. If None, every edge has weight 1. Weights should be
        positive; integer weights avoid floating-point round-off.

    Returns
    -------
    d : integer
        Diameter of graph

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.diameter(G)
    3

    See Also
    --------
    eccentricity
    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="diameter", weight=weight)
    ecc = e if e is not None else eccentricity(G, weight=weight)
    return max(ecc.values())
383
+
384
+
385
@nx._dispatchable(edge_attrs="weight")
def periphery(G, e=None, usebounds=False, weight=None):
    """Returns the periphery of the graph G.

    The periphery is the set of nodes whose eccentricity equals the
    diameter.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional
        If True (and ``e`` is None and `G` is undirected), use a faster
        bound-based algorithm instead of computing every eccentricity.

    weight : string, function, or None
        If a string, edge weights are read from that edge attribute
        (missing attributes count as 1). If a function, it is called with
        the two endpoints and the edge-attribute dict and must return a
        number. If None, every edge has weight 1. Weights should be
        positive; integer weights avoid floating-point round-off.

    Returns
    -------
    p : list
        List of nodes in periphery

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.periphery(G)
    [2, 5]

    See Also
    --------
    barycenter
    center
    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="periphery", weight=weight)
    ecc = e if e is not None else eccentricity(G, weight=weight)
    max_ecc = max(ecc.values())
    return [node for node, value in ecc.items() if value == max_ecc]
442
+
443
+
444
@nx._dispatchable(edge_attrs="weight")
def radius(G, e=None, usebounds=False, weight=None):
    """Returns the radius of the graph G.

    The radius is the minimum eccentricity over all nodes.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional
        If True (and ``e`` is None and `G` is undirected), use a faster
        bound-based algorithm instead of computing every eccentricity.

    weight : string, function, or None
        If a string, edge weights are read from that edge attribute
        (missing attributes count as 1). If a function, it is called with
        the two endpoints and the edge-attribute dict and must return a
        number. If None, every edge has weight 1. Weights should be
        positive; integer weights avoid floating-point round-off.

    Returns
    -------
    r : integer
        Radius of graph

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.radius(G)
    2

    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="radius", weight=weight)
    ecc = e if e is not None else eccentricity(G, weight=weight)
    return min(ecc.values())
495
+
496
+
497
@nx._dispatchable(edge_attrs="weight")
def center(G, e=None, usebounds=False, weight=None):
    """Returns the center of the graph G.

    The center is the set of nodes whose eccentricity equals the radius.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional
        If True (and ``e`` is None and `G` is undirected), use a faster
        bound-based algorithm instead of computing every eccentricity.

    weight : string, function, or None
        If a string, edge weights are read from that edge attribute
        (missing attributes count as 1). If a function, it is called with
        the two endpoints and the edge-attribute dict and must return a
        number. If None, every edge has weight 1. Weights should be
        positive; integer weights avoid floating-point round-off.

    Returns
    -------
    c : list
        List of nodes in center

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.center(G))
    [1, 3, 4]

    See Also
    --------
    barycenter
    periphery
    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="center", weight=weight)
    ecc = e if e is not None else eccentricity(G, weight=weight)
    min_ecc = min(ecc.values())
    return [node for node, value in ecc.items() if value == min_ecc]
554
+
555
+
556
@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2})
def barycenter(G, weight=None, attr=None, sp=None):
    r"""Calculate barycenter of a connected graph, optionally with edge weights.

    The :dfn:`barycenter` of a
    :func:`connected <networkx.algorithms.components.is_connected>` graph
    :math:`G` is the subgraph induced by the nodes :math:`v` minimizing

    .. math::

        \sum_{u \in V(G)} d_G(u, v),

    where :math:`d_G` is the (possibly weighted) :func:`path length
    <networkx.algorithms.shortest_paths.generic.shortest_path_length>`.
    The barycenter is also called the :dfn:`median`. See [West01]_, p. 78.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        The connected graph :math:`G`.
    weight : :class:`str`, optional
        Passed through to
        :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`.
    attr : :class:`str`, optional
        If given, write the value of the objective function to each node's
        `attr` attribute. Otherwise do not store the value.
    sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries

    Returns
    -------
    list
        Nodes of `G` that induce the barycenter of `G`.

    Raises
    ------
    NetworkXNoPath
        If `G` is disconnected. `G` may appear disconnected to
        :func:`barycenter` if `sp` is given but is missing shortest path
        lengths for any pairs.
    ValueError
        If `sp` and `weight` are both given.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.barycenter(G)
    [1, 3, 4]

    See Also
    --------
    center
    periphery
    """
    if sp is None:
        dist_items = nx.shortest_path_length(G, weight=weight)
    else:
        dist_items = sp.items()
        if weight is not None:
            raise ValueError("Cannot use both sp, weight arguments together")
    n = len(G)
    best = float("inf")
    best_nodes = []
    for node, dists in dist_items:
        # Fewer than n reachable nodes means some distance is infinite.
        if len(dists) < n:
            raise nx.NetworkXNoPath(
                f"Input graph {G} is disconnected, so every induced subgraph "
                "has infinite barycentricity."
            )
        total = sum(dists.values())
        if attr is not None:
            G.nodes[node][attr] = total
        if total < best:
            best, best_nodes = total, [node]
        elif total == best:
            best_nodes.append(node)
    if attr is not None:
        nx._clear_cache(G)
    return best_nodes
635
+
636
+
637
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True):
    """Returns the resistance distance between pairs of nodes in graph G.

    The resistance distance between two nodes of a graph is akin to treating
    the graph as a grid of resistors with a resistance equal to the provided
    weight [1]_, [2]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    If two nodes are the same, the resistance distance is zero.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    nodeA : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as source nodes.

    nodeB : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as target nodes.

    weight : string or None, optional (default=None)
        The edge data key used to compute the resistance distance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    rd : dict or float
        If `nodeA` and `nodeB` are given, resistance distance between `nodeA`
        and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a
        dictionary of nodes with resistance distances as the value.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` is not connected, or contains no nodes,
        or `nodeA` is not in `G` or `nodeB` is not in `G`.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.resistance_distance(G, 1, 3), 10)
    0.625

    Notes
    -----
    The implementation is based on Theorem A in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Resistance distance."
       https://en.wikipedia.org/wiki/Resistance_distance
    .. [2] D. J. Klein and M. Randic.
       Resistance distance.
       J. of Math. Chem. 12:81-95, 1993.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        # G is guaranteed undirected by @not_implemented_for("directed"),
        # so plain connectivity (not strong connectivity) is the requirement.
        raise nx.NetworkXError("Graph G must be connected.")
    if nodeA is not None and nodeA not in G:
        raise nx.NetworkXError("Node A is not in graph G.")
    if nodeB is not None and nodeB not in G:
        raise nx.NetworkXError("Node B is not in graph G.")

    # Work on a copy so the caller's edge data is not mutated by the
    # weight inversion below.
    G = G.copy()
    # O(1) node -> matrix-row lookups (replaces repeated O(n) list.index).
    node_index = {n: i for i, n in enumerate(G)}

    # Invert weights: the Laplacian must be built from conductances
    # (reciprocal resistances).
    if invert_weight and weight is not None:
        if G.is_multigraph():
            for u, v, k, d in G.edges(keys=True, data=True):
                d[weight] = 1 / d[weight]
        else:
            for u, v, d in G.edges(data=True):
                d[weight] = 1 / d[weight]

    # Compute resistance distance using the pseudo-inverse of the Laplacian.
    # Self-loops are ignored (they do not appear in the Laplacian).
    L = nx.laplacian_matrix(G, weight=weight).todense()
    Linv = np.linalg.pinv(L, hermitian=True)

    def _rd(i, j):
        # Theorem A of Klein & Randic (1993):
        # Omega_ij = L+_ii + L+_jj - L+_ij - L+_ji
        return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i)

    # Return relevant distances
    if nodeA is not None and nodeB is not None:
        return _rd(node_index[nodeA], node_index[nodeB])
    if nodeA is not None:
        i = node_index[nodeA]
        return {n: _rd(i, node_index[n]) for n in G}
    if nodeB is not None:
        j = node_index[nodeB]
        return {n: _rd(node_index[n], j) for n in G}
    return {n: {n2: _rd(node_index[n], node_index[n2]) for n2 in G} for n in G}
772
+
773
+
774
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def effective_graph_resistance(G, weight=None, invert_weight=True):
    """Returns the Effective graph resistance of G.

    Also known as the Kirchhoff index, this is the sum of the resistance
    distance over every pair of nodes in G [1]_. For a disconnected graph
    the value is infinite.

    If weight is not provided, then a weight of 1 is used for all edges.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the effective graph resistance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    RG : float
        The effective graph resistance of `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` does not contain any nodes.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.effective_graph_resistance(G), 10)
    10.25

    Notes
    -----
    The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wolfram
       "Kirchhoff Index."
       https://mathworld.wolfram.com/KirchhoffIndex.html
    .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij.
       Effective graph resistance.
       Lin. Alg. Appl. 435:2491-2506, 2011.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")

    # A disconnected graph has infinite effective graph resistance.
    if not nx.is_connected(G):
        return float("inf")

    # Copy first so weight inversion does not mutate the caller's graph.
    G = G.copy()
    if invert_weight and weight is not None:
        if G.is_multigraph():
            edge_iter = G.edges(keys=True, data=True)
        else:
            edge_iter = G.edges(data=True)
        for edge in edge_iter:
            data = edge[-1]
            data[weight] = 1 / data[weight]

    # Theorem 2.2: n times the sum of reciprocal nonzero Laplacian
    # eigenvalues (the smallest, zero, eigenvalue is dropped).
    mu = np.sort(nx.laplacian_spectrum(G, weight=weight))
    return float(np.sum(1 / mu[1:]) * G.number_of_nodes())
860
+
861
+
862
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def kemeny_constant(G, *, weight=None):
    """Returns the Kemeny constant of the given graph.

    The *Kemeny constant* (or Kemeny's constant) of a graph `G`
    can be computed by regarding the graph as a Markov chain.
    The Kemeny constant is then the expected number of time steps
    to transition from a starting state i to a random destination state
    sampled from the Markov chain's stationary distribution.
    The Kemeny constant is independent of the chosen initial state [1]_.

    The Kemeny constant measures the time needed for spreading
    across a graph. Low values indicate a closely connected graph
    whereas high values indicate a spread-out graph.

    If weight is not provided, then a weight of 1 is used for all edges.

    Since `G` represents a Markov chain, the weights must be positive.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the Kemeny constant.
        If None, then each edge has weight 1.

    Returns
    -------
    float
        The Kemeny constant of the graph `G`.

    Raises
    ------
    NetworkXNotImplemented
        If the graph `G` is directed.

    NetworkXError
        If the graph `G` is not connected, or contains no nodes,
        or has edges with negative weights.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> round(nx.kemeny_constant(G), 10)
    3.2

    Notes
    -----
    The implementation is based on equation (3.3) in [2]_.
    Self-loops are allowed and indicate a Markov chain where
    the state can remain the same. Multi-edges are contracted
    in one edge with weight equal to the sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Kemeny's constant."
       https://en.wikipedia.org/wiki/Kemeny%27s_constant
    .. [2] Lovász L.
       Random walks on graphs: A survey.
       Paul Erdös is Eighty, vol. 2, Bolyai Society,
       Mathematical Studies, Keszthely, Hungary (1993), pp. 1-46
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G must be connected.")
    if nx.is_negatively_weighted(G, weight=weight):
        raise nx.NetworkXError("The weights of graph G must be nonnegative.")

    # Compute matrix H = D^-1/2 A D^-1/2
    # H is the symmetrically normalized adjacency matrix; it shares its
    # spectrum with the Markov transition matrix P = D^-1 A.
    A = nx.adjacency_matrix(G, weight=weight)
    n, m = A.shape
    diags = A.sum(axis=1)
    # Silence divide-by-zero warnings for zero-degree entries; the
    # resulting infinities are replaced by 0 on the next line.
    with np.errstate(divide="ignore"):
        diags_sqrt = 1.0 / np.sqrt(diags)
    diags_sqrt[np.isinf(diags_sqrt)] = 0
    DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr"))
    H = DH @ (A @ DH)

    # Compute eigenvalues of H
    # eigvalsh is used because H is symmetric (real eigenvalues).
    eig = np.sort(sp.linalg.eigvalsh(H.todense()))

    # Compute the Kemeny constant
    # Equation (3.3) of Lovász: sum of 1/(1 - lambda_i) over all
    # eigenvalues except the largest one (which equals 1 and is
    # dropped via eig[:-1]).
    return float(np.sum(1 / (1 - eig[:-1])))
venv/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ =======================
3
+ Distance-regular graphs
4
+ =======================
5
+ """
6
+
7
+ import networkx as nx
8
+ from networkx.utils import not_implemented_for
9
+
10
+ from .distance_measures import diameter
11
+
12
+ __all__ = [
13
+ "is_distance_regular",
14
+ "is_strongly_regular",
15
+ "intersection_array",
16
+ "global_parameters",
17
+ ]
18
+
19
+
20
@nx._dispatchable
def is_distance_regular(G):
    """Returns True if the graph is distance regular, False otherwise.

    A connected graph G is distance-regular if for any nodes x,y
    and any integers i,j=0,1,...,d (where d is the graph
    diameter), the number of vertices at distance i from x and
    distance j from y depends only on i,j and the graph distance
    between x and y, independently of the choice of x and y.

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    bool
        True if the graph is Distance Regular, False otherwise

    Examples
    --------
    >>> G = nx.hypercube_graph(6)
    >>> nx.is_distance_regular(G)
    True

    See Also
    --------
    intersection_array, global_parameters

    Notes
    -----
    For undirected and simple graphs only

    References
    ----------
    .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A.
       Distance-Regular Graphs. New York: Springer-Verlag, 1989.
    .. [2] Weisstein, Eric W. "Distance-Regular Graph."
       http://mathworld.wolfram.com/Distance-RegularGraph.html

    """
    # The graph is distance regular exactly when an intersection array
    # exists; intersection_array raises NetworkXError otherwise.
    try:
        intersection_array(G)
    except nx.NetworkXError:
        return False
    return True
66
+
67
+
68
def global_parameters(b, c):
    """Returns global parameters for a given intersection array.

    Given a distance-regular graph G with integers b_i, c_i, i = 0,...,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    Thus, a distance regular graph has the global parameters,
    [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the
    intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]
    where a_i+b_i+c_i=k , k= degree of every vertex.

    Parameters
    ----------
    b : list

    c : list

    Returns
    -------
    iterable
        An iterable over three tuples.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> b, c = nx.intersection_array(G)
    >>> list(nx.global_parameters(b, c))
    [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)]

    References
    ----------
    .. [1] Weisstein, Eric W. "Global Parameters."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/GlobalParameters.html

    See Also
    --------
    intersection_array
    """
    # Pad b with a trailing 0 and c with a leading 0 so that each distance
    # level i pairs b_i with c_i; a_i = k - b_i - c_i where k = b[0].
    pairs = zip(b + [0], [0] + c)
    return ((ci, b[0] - bi - ci, bi) for bi, ci in pairs)
110
+
111
+
112
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def intersection_array(G):
    """Returns the intersection array of a distance-regular graph.

    For a distance-regular graph G there are integers b_i, c_i such that
    for every pair of vertices x, y at distance i, exactly c_i neighbors
    of y lie at distance i-1 from x and exactly b_i neighbors of y lie at
    distance i+1 from x. The intersection array is then
    [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] where d is the diameter.

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    b,c: tuple of lists

    Examples
    --------
    >>> G = nx.icosahedral_graph()
    >>> nx.intersection_array(G)
    ([5, 2, 1], [1, 2, 5])

    References
    ----------
    .. [1] Weisstein, Eric W. "Intersection Array."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/IntersectionArray.html

    See Also
    --------
    global_parameters
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    # A distance-regular graph must be regular: every degree equal.
    degree_iter = iter(G.degree())
    _, k = next(degree_iter)
    for _, deg in degree_iter:
        if deg != k:
            raise nx.NetworkXError("Graph is not distance regular.")
        k = deg
    path_length = dict(nx.all_pairs_shortest_path_length(G))
    diameter = max(max(dists.values()) for dists in path_length.values())
    bint = {}  # candidate b_i values keyed by distance i
    cint = {}  # candidate c_i values keyed by distance i
    for u in G:
        for v in G:
            try:
                i = path_length[u][v]
            except KeyError as err:  # unreachable pair: graph is disconnected
                raise nx.NetworkXError("Graph is not distance regular.") from err
            # Neighbors of v one step closer to / farther from u.
            c = sum(1 for n in G[v] if path_length[n][u] == i - 1)
            b = sum(1 for n in G[v] if path_length[n][u] == i + 1)
            # The counts must be independent of the particular pair (u, v).
            if cint.get(i, c) != c or bint.get(i, b) != b:
                raise nx.NetworkXError("Graph is not distance regular")
            bint[i] = b
            cint[i] = c
    b_array = [bint.get(j, 0) for j in range(diameter)]
    c_array = [cint.get(j + 1, 0) for j in range(diameter)]
    return (b_array, c_array)
182
+
183
+
184
# TODO There is a definition for directed strongly regular graphs.
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_strongly_regular(G):
    """Returns True if and only if the given graph is strongly
    regular.

    An undirected graph is *strongly regular* if

    * it is regular,
    * each pair of adjacent vertices has the same number of neighbors in
      common,
    * each pair of nonadjacent vertices has the same number of neighbors
      in common.

    Each strongly regular graph is a distance-regular graph.
    Conversely, if a distance-regular graph has diameter two, then it is
    a strongly regular graph. For more information on distance-regular
    graphs, see :func:`is_distance_regular`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    bool
        Whether `G` is strongly regular.

    Examples
    --------

    The cycle graph on five vertices is strongly regular. It is
    two-regular, each pair of adjacent vertices has no shared neighbors,
    and each pair of nonadjacent vertices has one shared neighbor::

        >>> G = nx.cycle_graph(5)
        >>> nx.is_strongly_regular(G)
        True

    """
    # A graph is strongly regular iff it is distance-regular with
    # diameter two; checking distance-regularity first also keeps
    # diameter() from running on graphs that cannot qualify.
    if not is_distance_regular(G):
        return False
    return diameter(G) == 2
venv/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Flow Hierarchy.
3
+ """
4
+ import networkx as nx
5
+
6
+ __all__ = ["flow_hierarchy"]
7
+
8
+
9
@nx._dispatchable(edge_attrs="weight")
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    Flow hierarchy is defined as the fraction of edges not participating
    in cycles in a directed graph [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
        A directed graph

    weight : string, optional (default=None)
        Attribute to use for edge weights. If None the weight defaults to 1.

    Returns
    -------
    h : float
        Flow hierarchy value

    Notes
    -----
    The algorithm described in [1]_ computes the flow hierarchy through
    exponentiation of the adjacency matrix. This function implements an
    alternative approach that finds strongly connected components.
    An edge is in a cycle if and only if it is in a strongly connected
    component, which can be found in $O(m)$ time using Tarjan's algorithm.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    if not G.is_directed():
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # An edge lies on a cycle iff both endpoints are in the same strongly
    # connected component, so the cyclic edge weight is the total size of
    # the subgraphs induced by the SCCs.
    cyclic_weight = 0
    for component in nx.strongly_connected_components(G):
        cyclic_weight += G.subgraph(component).size(weight)
    return 1 - cyclic_weight / G.size(weight)
venv/lib/python3.10/site-packages/networkx/algorithms/isolate.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Functions for identifying isolate (degree zero) nodes.
3
+ """
4
+ import networkx as nx
5
+
6
+ __all__ = ["is_isolate", "isolates", "number_of_isolates"]
7
+
8
+
9
@nx._dispatchable
def is_isolate(G, n):
    """Determines whether a node is an isolate.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this requires that the node has neither
    in-neighbors nor out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    n : node
        A node in `G`.

    Returns
    -------
    is_isolate : bool
        True if and only if `n` has no neighbors.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge(1, 2)
    >>> G.add_node(3)
    >>> nx.is_isolate(G, 2)
    False
    >>> nx.is_isolate(G, 3)
    True
    """
    # A degree of 0 is falsy, so the node is an isolate exactly when its
    # degree does not hold a truthy (non-zero) value.
    return not G.degree(n)
40
+
41
+
42
@nx._dispatchable
def isolates(G):
    """Iterator over isolates in the graph.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this requires that the node has neither
    in-neighbors nor out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    iterator
        An iterator over the isolates of `G`.

    Examples
    --------
    To get a list of all isolates of a graph, use the :class:`list`
    constructor::

        >>> G = nx.Graph()
        >>> G.add_edge(1, 2)
        >>> G.add_node(3)
        >>> list(nx.isolates(G))
        [3]

    To remove all isolates in the graph, first create a list of the
    isolates, then use :meth:`Graph.remove_nodes_from`::

        >>> G.remove_nodes_from(list(nx.isolates(G)))
        >>> list(G)
        [1, 2]

    For digraphs, isolates have zero in-degree and zero out-degree::

        >>> G = nx.DiGraph([(0, 1), (1, 2)])
        >>> G.add_node(3)
        >>> list(nx.isolates(G))
        [3]

    """
    # Lazily filter the (node, degree) view down to degree-zero nodes.
    return (node for node, degree in G.degree() if degree == 0)
86
+
87
+
88
@nx._dispatchable
def number_of_isolates(G):
    """Returns the number of isolates in the graph.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this requires that the node has neither
    in-neighbors nor out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    int
        The number of degree zero nodes in the graph `G`.

    """
    # TODO: this count could be computed in parallel.
    # Count the isolates lazily without materializing them.
    return sum(1 for _ in isolates(G))
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ from networkx.algorithms.isomorphism.isomorph import *
2
+ from networkx.algorithms.isomorphism.vf2userfunc import *
3
+ from networkx.algorithms.isomorphism.matchhelpers import *
4
+ from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
5
+ from networkx.algorithms.isomorphism.ismags import *
6
+ from networkx.algorithms.isomorphism.tree_isomorphism import *
7
+ from networkx.algorithms.isomorphism.vf2pp import *
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (589 Bytes). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc ADDED
Binary file (33 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc ADDED
Binary file (7.76 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc ADDED
Binary file (22.3 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc ADDED
Binary file (7.45 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc ADDED
Binary file (28.5 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc ADDED
Binary file (6.59 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py ADDED
@@ -0,0 +1,1163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ISMAGS Algorithm
3
+ ================
4
+
5
+ Provides a Python implementation of the ISMAGS algorithm. [1]_
6
+
7
+ It is capable of finding (subgraph) isomorphisms between two graphs, taking the
8
+ symmetry of the subgraph into account. In most cases the VF2 algorithm is
9
+ faster (at least on small graphs) than this implementation, but in some cases
10
+ there is an exponential number of isomorphisms that are symmetrically
11
+ equivalent. In that case, the ISMAGS algorithm will provide only one solution
12
+ per symmetry group.
13
+
14
+ >>> petersen = nx.petersen_graph()
15
+ >>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
16
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
17
+ >>> len(isomorphisms)
18
+ 120
19
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
20
+ >>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
21
+ >>> answer == isomorphisms
22
+ True
23
+
24
+ In addition, this implementation also provides an interface to find the
25
+ largest common induced subgraph [2]_ between any two graphs, again taking
26
+ symmetry into account. Given `graph` and `subgraph` the algorithm will remove
27
+ nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
28
+ `graph`. Since only the symmetry of `subgraph` is taken into account it is
29
+ worth thinking about how you provide your graphs:
30
+
31
+ >>> graph1 = nx.path_graph(4)
32
+ >>> graph2 = nx.star_graph(3)
33
+ >>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
34
+ >>> ismags.is_isomorphic()
35
+ False
36
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph())
37
+ >>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
38
+ >>> answer == largest_common_subgraph
39
+ True
40
+ >>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
41
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
42
+ >>> answer = [
43
+ ... {1: 0, 0: 1, 2: 2},
44
+ ... {1: 0, 0: 1, 3: 2},
45
+ ... {2: 0, 0: 1, 1: 2},
46
+ ... {2: 0, 0: 1, 3: 2},
47
+ ... {3: 0, 0: 1, 1: 2},
48
+ ... {3: 0, 0: 1, 2: 2},
49
+ ... ]
50
+ >>> answer == largest_common_subgraph
51
+ True
52
+
53
+ However, when not taking symmetry into account, it doesn't matter:
54
+
55
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
56
+ >>> answer = [
57
+ ... {1: 0, 0: 1, 2: 2},
58
+ ... {1: 0, 2: 1, 0: 2},
59
+ ... {2: 0, 1: 1, 3: 2},
60
+ ... {2: 0, 3: 1, 1: 2},
61
+ ... {1: 0, 0: 1, 2: 3},
62
+ ... {1: 0, 2: 1, 0: 3},
63
+ ... {2: 0, 1: 1, 3: 3},
64
+ ... {2: 0, 3: 1, 1: 3},
65
+ ... {1: 0, 0: 2, 2: 3},
66
+ ... {1: 0, 2: 2, 0: 3},
67
+ ... {2: 0, 1: 2, 3: 3},
68
+ ... {2: 0, 3: 2, 1: 3},
69
+ ... ]
70
+ >>> answer == largest_common_subgraph
71
+ True
72
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
73
+ >>> answer = [
74
+ ... {1: 0, 0: 1, 2: 2},
75
+ ... {1: 0, 0: 1, 3: 2},
76
+ ... {2: 0, 0: 1, 1: 2},
77
+ ... {2: 0, 0: 1, 3: 2},
78
+ ... {3: 0, 0: 1, 1: 2},
79
+ ... {3: 0, 0: 1, 2: 2},
80
+ ... {1: 1, 0: 2, 2: 3},
81
+ ... {1: 1, 0: 2, 3: 3},
82
+ ... {2: 1, 0: 2, 1: 3},
83
+ ... {2: 1, 0: 2, 3: 3},
84
+ ... {3: 1, 0: 2, 1: 3},
85
+ ... {3: 1, 0: 2, 2: 3},
86
+ ... ]
87
+ >>> answer == largest_common_subgraph
88
+ True
89
+
90
+ Notes
91
+ -----
92
+ - The current implementation works for undirected graphs only. The algorithm
93
+ in general should work for directed graphs as well though.
94
+ - Node keys for both provided graphs need to be fully orderable as well as
95
+ hashable.
96
+ - Node and edge equality is assumed to be transitive: if A is equal to B, and
97
+ B is equal to C, then A is equal to C.
98
+
99
+ References
100
+ ----------
101
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
102
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
103
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
104
+ Enumeration", PLoS One 9(5): e97896, 2014.
105
+ https://doi.org/10.1371/journal.pone.0097896
106
+ .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
107
+ """
108
+
109
+ __all__ = ["ISMAGS"]
110
+
111
+ import itertools
112
+ from collections import Counter, defaultdict
113
+ from functools import reduce, wraps
114
+
115
+
116
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise. An empty iterable yields ``True`` (vacuous truth).

    Raises
    ------
    NotImplementedError
        If `iterable` exposes a ``shape`` attribute describing more than
        one dimension (e.g. a multidimensional numpy array), since
        element-wise ``==`` would then not produce a plain boolean.
    """
    try:
        shape = iterable.shape
    except AttributeError:
        pass
    else:
        if len(shape) > 1:
            # Fixed grammar of the user-facing error message
            # ("does not works" -> "does not work").
            message = "The function does not work on multidimensional arrays."
            raise NotImplementedError(message) from None

    iterator = iter(iterable)
    first = next(iterator, None)
    # The first element was consumed above, so every remaining element is
    # compared against it; an exhausted iterator makes all() return True.
    return all(item == first for item in iterator)
144
+
145
+
146
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    groups = []
    for element in items:
        placed = False
        for group in groups:
            # Transitivity of `test` means one representative per group
            # is enough to decide membership.
            representative = next(iter(group))
            if test(element, representative):
                group.add(element)
                placed = True
                break
        if not placed:
            # No existing group accepted the element: open a new one.
            groups.append({element})
    return groups
182
+
183
+
184
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
        Mapping of every item to its partition's index ("color").
    """
    # The partition's position in the sequence serves as its color.
    return {
        member: color
        for color, partition in enumerate(partitions)
        for member in partition
    }
203
+
204
+
205
def intersect(collection_of_sets):
    """
    Given an collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # The popped (last) element determines the type of the result and
    # seeds the running intersection.
    seed = remaining.pop()
    common = set(seed)
    for other in remaining:
        common = common.intersection(other)
    return type(seed)(common)
224
+
225
+
226
+ class ISMAGS:
227
+ """
228
+ Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
229
+ "Index-based Subgraph Matching Algorithm with General Symmetries". As the
230
+ name implies, it is symmetry aware and will only generate non-symmetric
231
+ isomorphisms.
232
+
233
+ Notes
234
+ -----
235
+ The implementation imposes additional conditions compared to the VF2
236
+ algorithm on the graphs provided and the comparison functions
237
+ (:attr:`node_equality` and :attr:`edge_equality`):
238
+
239
+ - Node keys in both graphs must be orderable as well as hashable.
240
+ - Equality must be transitive: if A is equal to B, and B is equal to C,
241
+ then A must be equal to C.
242
+
243
+ Attributes
244
+ ----------
245
+ graph: networkx.Graph
246
+ subgraph: networkx.Graph
247
+ node_equality: collections.abc.Callable
248
+ The function called to see if two nodes should be considered equal.
249
+ It's signature looks like this:
250
+ ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
251
+ `node1` is a node in `graph1`, and `node2` a node in `graph2`.
252
+ Constructed from the argument `node_match`.
253
+ edge_equality: collections.abc.Callable
254
+ The function called to see if two edges should be considered equal.
255
+ It's signature looks like this:
256
+ ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
257
+ `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
258
+ Constructed from the argument `edge_match`.
259
+
260
+ References
261
+ ----------
262
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
263
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
264
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
265
+ Enumeration", PLoS One 9(5): e97896, 2014.
266
+ https://doi.org/10.1371/journal.pone.0097896
267
+ """
268
+
269
    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
        """
        Parameters
        ----------
        graph: networkx.Graph
        subgraph: networkx.Graph
        node_match: collections.abc.Callable or None
            Function used to determine whether two nodes are equivalent. Its
            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
            `n1` and `n2` node property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
            friends.
            If `None`, all nodes are considered equal.
        edge_match: collections.abc.Callable or None
            Function used to determine whether two edges are equivalent. Its
            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
            `e1` and `e2` edge property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
            friends.
            If `None`, all edges are considered equal.
        cache: collections.abc.Mapping
            A cache used for caching graph symmetries.
        """
        # TODO: graph and subgraph setter methods that invalidate the caches.
        # TODO: allow for precomputed partitions and colors
        self.graph = graph
        self.subgraph = subgraph
        self._symmetry_cache = cache
        # Naming conventions are taken from the original paper. For your
        # sanity:
        #   sg: subgraph
        #   g: graph
        #   e: edge(s)
        #   n: node(s)
        # So: sgn means "subgraph nodes".
        # Trailing-underscore attributes are lazy caches; ``None`` means
        # "not computed yet" and the matching property fills them on demand.
        self._sgn_partitions_ = None
        self._sge_partitions_ = None

        self._sgn_colors_ = None
        self._sge_colors_ = None

        self._gn_partitions_ = None
        self._ge_partitions_ = None

        self._gn_colors_ = None
        self._ge_colors_ = None

        self._node_compat_ = None
        self._edge_compat_ = None

        if node_match is None:
            # With trivial equality everything is one partition, so the
            # caches can be pre-filled cheaply instead of computed lazily.
            self.node_equality = self._node_match_maker(lambda n1, n2: True)
            self._sgn_partitions_ = [set(self.subgraph.nodes)]
            self._gn_partitions_ = [set(self.graph.nodes)]
            self._node_compat_ = {0: 0}
        else:
            self.node_equality = self._node_match_maker(node_match)
        if edge_match is None:
            # Same shortcut for edges when all edges compare equal.
            self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
            self._sge_partitions_ = [set(self.subgraph.edges)]
            self._ge_partitions_ = [set(self.graph.edges)]
            self._edge_compat_ = {0: 0}
        else:
            self.edge_equality = self._edge_match_maker(edge_match)
334
    @property
    def _sgn_partitions(self):
        # Lazily partition subgraph nodes into equivalence classes under
        # ``node_equality``; the result is cached in ``_sgn_partitions_``.
        if self._sgn_partitions_ is None:

            def nodematch(node1, node2):
                return self.node_equality(self.subgraph, node1, self.subgraph, node2)

            self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
        return self._sgn_partitions_
343
+
344
    @property
    def _sge_partitions(self):
        # Lazily partition subgraph edges into equivalence classes under
        # ``edge_equality``; the result is cached in ``_sge_partitions_``.
        if self._sge_partitions_ is None:

            def edgematch(edge1, edge2):
                return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)

            self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
        return self._sge_partitions_
353
+
354
    @property
    def _gn_partitions(self):
        # Lazily partition graph nodes into equivalence classes under
        # ``node_equality``; the result is cached in ``_gn_partitions_``.
        if self._gn_partitions_ is None:

            def nodematch(node1, node2):
                return self.node_equality(self.graph, node1, self.graph, node2)

            self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
        return self._gn_partitions_
363
+
364
    @property
    def _ge_partitions(self):
        # Lazily partition graph edges into equivalence classes under
        # ``edge_equality``; the result is cached in ``_ge_partitions_``.
        if self._ge_partitions_ is None:

            def edgematch(edge1, edge2):
                return self.edge_equality(self.graph, edge1, self.graph, edge2)

            self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
        return self._ge_partitions_
373
+
374
    @property
    def _sgn_colors(self):
        # Cached mapping of subgraph node -> index of its node partition.
        if self._sgn_colors_ is None:
            self._sgn_colors_ = partition_to_color(self._sgn_partitions)
        return self._sgn_colors_
379
+
380
    @property
    def _sge_colors(self):
        # Cached mapping of subgraph edge -> index of its edge partition.
        if self._sge_colors_ is None:
            self._sge_colors_ = partition_to_color(self._sge_partitions)
        return self._sge_colors_
385
+
386
    @property
    def _gn_colors(self):
        # Cached mapping of graph node -> index of its node partition.
        if self._gn_colors_ is None:
            self._gn_colors_ = partition_to_color(self._gn_partitions)
        return self._gn_colors_
391
+
392
    @property
    def _ge_colors(self):
        # Cached mapping of graph edge -> index of its edge partition.
        if self._ge_colors_ is None:
            self._ge_colors_ = partition_to_color(self._ge_partitions)
        return self._ge_colors_
397
+
398
    @property
    def _node_compatibility(self):
        # Cached mapping of subgraph node-partition index -> the graph
        # node-partition index it is compatible with. Comparing a single
        # representative per partition suffices because node equality is
        # assumed to be transitive.
        if self._node_compat_ is not None:
            return self._node_compat_
        self._node_compat_ = {}
        for sgn_part_color, gn_part_color in itertools.product(
            range(len(self._sgn_partitions)), range(len(self._gn_partitions))
        ):
            sgn = next(iter(self._sgn_partitions[sgn_part_color]))
            gn = next(iter(self._gn_partitions[gn_part_color]))
            if self.node_equality(self.subgraph, sgn, self.graph, gn):
                self._node_compat_[sgn_part_color] = gn_part_color
        return self._node_compat_
411
+
412
    @property
    def _edge_compatibility(self):
        # Cached mapping of subgraph edge-partition index -> the graph
        # edge-partition index it is compatible with. One representative
        # per partition suffices since edge equality is assumed transitive.
        if self._edge_compat_ is not None:
            return self._edge_compat_
        self._edge_compat_ = {}
        for sge_part_color, ge_part_color in itertools.product(
            range(len(self._sge_partitions)), range(len(self._ge_partitions))
        ):
            sge = next(iter(self._sge_partitions[sge_part_color]))
            ge = next(iter(self._ge_partitions[ge_part_color]))
            if self.edge_equality(self.subgraph, sge, self.graph, ge):
                self._edge_compat_[sge_part_color] = ge_part_color
        return self._edge_compat_
425
+
426
    @staticmethod
    def _node_match_maker(cmp):
        # Adapt a user-facing ``cmp(attrs1, attrs2)`` node matcher to the
        # internal ``f(graph1, node1, graph2, node2)`` signature by looking
        # up the nodes' attribute dicts.
        @wraps(cmp)
        def comparer(graph1, node1, graph2, node2):
            return cmp(graph1.nodes[node1], graph2.nodes[node2])

        return comparer
433
+
434
    @staticmethod
    def _edge_match_maker(cmp):
        # Adapt a user-facing ``cmp(attrs1, attrs2)`` edge matcher to the
        # internal ``f(graph1, edge1, graph2, edge2)`` signature by looking
        # up the edges' attribute dicts.
        @wraps(cmp)
        def comparer(graph1, edge1, graph2, edge2):
            return cmp(graph1.edges[edge1], graph2.edges[edge2])

        return comparer
441
+
442
    def find_isomorphisms(self, symmetry=True):
        """Find all subgraph isomorphisms between subgraph and graph

        Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            isomorphisms may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return
        elif len(self.graph) < len(self.subgraph):
            # The subgraph cannot fit into a smaller graph.
            return

        if symmetry:
            # Symmetry-breaking constraints ensure only one representative
            # per symmetry group is yielded.
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()
        # Narrow candidate sets further with a one-edge lookahead filter.
        la_candidates = self._get_lookahead_candidates()
        for sgn in self.subgraph:
            extra_candidates = la_candidates[sgn]
            if extra_candidates:
                candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}

        if any(candidates.values()):
            # Start mapping from the subgraph node with the smallest
            # candidate set to prune the search tree early.
            start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
            candidates[start_sgn] = (intersect(candidates[start_sgn]),)
            yield from self._map_nodes(start_sgn, candidates, constraints)
        else:
            return
489
+
490
    @staticmethod
    def _find_neighbor_color_count(graph, node, node_color, edge_color):
        """
        For `node` in `graph`, count the number of edges of a specific color
        it has to nodes of a specific color.

        Returns a :class:`collections.Counter` keyed by
        ``(edge color, neighbor node color)`` pairs.
        """
        counts = Counter()
        neighbors = graph[node]
        for neighbor in neighbors:
            n_color = node_color[neighbor]
            # Edge colors are keyed by one orientation only (undirected
            # graphs), so fall back to the reversed key if needed.
            if (node, neighbor) in edge_color:
                e_color = edge_color[node, neighbor]
            else:
                e_color = edge_color[neighbor, node]
            counts[e_color, n_color] += 1
        return counts
506
+
507
    def _get_lookahead_candidates(self):
        """
        Returns a mapping of {subgraph node: collection of graph nodes} for
        which the graph nodes are feasible candidates for the subgraph node, as
        determined by looking ahead one edge.
        """
        # Per graph node: how many edges of each (edge color, node color)
        # combination it has.
        g_counts = {}
        for gn in self.graph:
            g_counts[gn] = self._find_neighbor_color_count(
                self.graph, gn, self._gn_colors, self._ge_colors
            )
        candidates = defaultdict(set)
        for sgn in self.subgraph:
            sg_count = self._find_neighbor_color_count(
                self.subgraph, sgn, self._sgn_colors, self._sge_colors
            )
            # Translate subgraph color keys into graph color keys via the
            # compatibility maps; incompatible colors are dropped since no
            # graph node can satisfy them anyway.
            new_sg_count = Counter()
            for (sge_color, sgn_color), count in sg_count.items():
                try:
                    ge_color = self._edge_compatibility[sge_color]
                    gn_color = self._node_compatibility[sgn_color]
                except KeyError:
                    pass
                else:
                    new_sg_count[ge_color, gn_color] = count

            for gn, g_count in g_counts.items():
                # A graph node is feasible only if it has at least as many
                # edges of every required color combination.
                if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
                    # Valid candidate
                    candidates[sgn].add(gn)
        return candidates
538
+
539
    def largest_common_subgraph(self, symmetry=True):
        """
        Find the largest common induced subgraphs between :attr:`subgraph` and
        :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            largest common subgraphs may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return

        if symmetry:
            # Only the subgraph's symmetry is analyzed; see the module
            # docstring for why argument order matters here.
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()

        if any(candidates.values()):
            yield from self._largest_common_subgraph(candidates, constraints)
        else:
            return
577
+
578
    def analyze_symmetry(self, graph, node_partitions, edge_colors):
        """
        Find a minimal set of permutations and corresponding co-sets that
        describe the symmetry of `graph`, given the node and edge equalities
        given by `node_partitions` and `edge_colors`, respectively.

        Parameters
        ----------
        graph : networkx.Graph
            The graph whose symmetry should be analyzed.
        node_partitions : list of sets
            A list of sets containing node keys. Node keys in the same set
            are considered equivalent. Every node key in `graph` should be in
            exactly one of the sets. If all nodes are equivalent, this should
            be ``[set(graph.nodes)]``.
        edge_colors : dict mapping edges to their colors
            A dict mapping every edge in `graph` to its corresponding color.
            Edges with the same color are considered equivalent. If all edges
            are equivalent, this should be ``{e: 0 for e in graph.edges}``.


        Returns
        -------
        set[frozenset]
            The found permutations. This is a set of frozensets of pairs of node
            keys which can be exchanged without changing :attr:`subgraph`.
        dict[collections.abc.Hashable, set[collections.abc.Hashable]]
            The found co-sets. The co-sets is a dictionary of
            ``{node key: set of node keys}``.
            Every key-value pair describes which ``values`` can be interchanged
            without changing nodes less than ``key``.
        """
        if self._symmetry_cache is not None:
            # The cache key captures everything the result depends on:
            # node/edge sets plus the given equivalences.
            key = hash(
                (
                    tuple(graph.nodes),
                    tuple(graph.edges),
                    tuple(map(tuple, node_partitions)),
                    tuple(edge_colors.items()),
                )
            )
            if key in self._symmetry_cache:
                return self._symmetry_cache[key]
        # Refinement of a valid partition without branching yields exactly
        # one refined partition, hence the assert below.
        node_partitions = list(
            self._refine_node_partitions(graph, node_partitions, edge_colors)
        )
        assert len(node_partitions) == 1
        node_partitions = node_partitions[0]
        permutations, cosets = self._process_ordered_pair_partitions(
            graph, node_partitions, node_partitions, edge_colors
        )
        if self._symmetry_cache is not None:
            self._symmetry_cache[key] = permutations, cosets
        return permutations, cosets
632
+
633
+ def is_isomorphic(self, symmetry=False):
634
+ """
635
+ Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
636
+ False otherwise.
637
+
638
+ Returns
639
+ -------
640
+ bool
641
+ """
642
+ return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
643
+ symmetry
644
+ )
645
+
646
+ def subgraph_is_isomorphic(self, symmetry=False):
647
+ """
648
+ Returns True if a subgraph of :attr:`graph` is isomorphic to
649
+ :attr:`subgraph` and False otherwise.
650
+
651
+ Returns
652
+ -------
653
+ bool
654
+ """
655
+ # symmetry=False, since we only need to know whether there is any
656
+ # example; figuring out all symmetry elements probably costs more time
657
+ # than it gains.
658
+ isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
659
+ return isom is not None
660
+
661
+ def isomorphisms_iter(self, symmetry=True):
662
+ """
663
+ Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
664
+ :attr:`subgraph` have the same number of nodes.
665
+ """
666
+ if len(self.graph) == len(self.subgraph):
667
+ yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
668
+
669
    def subgraph_isomorphisms_iter(self, symmetry=True):
        """Alternative name for :meth:`find_isomorphisms`."""
        # Thin alias; returns the generator produced by find_isomorphisms.
        return self.find_isomorphisms(symmetry)
672
+
673
    def _find_nodecolor_candidates(self):
        """
        Per node in subgraph find all nodes in graph that have the same color.
        """
        candidates = defaultdict(set)
        for sgn in self.subgraph.nodes:
            sgn_color = self._sgn_colors[sgn]
            if sgn_color in self._node_compatibility:
                gn_color = self._node_compatibility[sgn_color]
                # All graph nodes of the compatible color are candidates.
                candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
            else:
                # No compatible graph color: this subgraph node can never
                # be matched.
                candidates[sgn].add(frozenset())
        candidates = dict(candidates)
        # Freeze the per-node candidate collections so they can later be
        # combined with extra (lookahead) frozensets.
        for sgn, options in candidates.items():
            candidates[sgn] = frozenset(options)
        return candidates
689
+
690
+ @staticmethod
691
+ def _make_constraints(cosets):
692
+ """
693
+ Turn cosets into constraints.
694
+ """
695
+ constraints = []
696
+ for node_i, node_ts in cosets.items():
697
+ for node_t in node_ts:
698
+ if node_i != node_t:
699
+ # Node i must be smaller than node t.
700
+ constraints.append((node_i, node_t))
701
+ return constraints
702
+
703
    @staticmethod
    def _find_node_edge_color(graph, node_colors, edge_colors):
        """
        For every node in graph, come up with a color that combines 1) the
        color of the node, and 2) the number of edges of a color to each type
        of node.
        """
        counts = defaultdict(lambda: defaultdict(int))
        for node1, node2 in graph.edges:
            if (node1, node2) in edge_colors:
                # FIXME directed graphs
                ecolor = edge_colors[node1, node2]
            else:
                # Undirected graphs: the edge may be keyed in the reverse
                # orientation.
                ecolor = edge_colors[node2, node1]
            # Count per node how many edges it has of what color to nodes of
            # what color
            counts[node1][ecolor, node_colors[node2]] += 1
            counts[node2][ecolor, node_colors[node1]] += 1

        # Combine the node's own color with its neighborhood fingerprint.
        node_edge_colors = {}
        for node in graph.nodes:
            node_edge_colors[node] = node_colors[node], set(counts[node].items())

        return node_edge_colors
727
+
728
+ @staticmethod
729
+ def _get_permutations_by_length(items):
730
+ """
731
+ Get all permutations of items, but only permute items with the same
732
+ length.
733
+
734
+ >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
735
+ >>> answer = [
736
+ ... (([1], [2]), ([3, 4], [4, 5])),
737
+ ... (([1], [2]), ([4, 5], [3, 4])),
738
+ ... (([2], [1]), ([3, 4], [4, 5])),
739
+ ... (([2], [1]), ([4, 5], [3, 4])),
740
+ ... ]
741
+ >>> found == answer
742
+ True
743
+ """
744
+ by_len = defaultdict(list)
745
+ for item in items:
746
+ by_len[len(item)].append(item)
747
+
748
+ yield from itertools.product(
749
+ *(itertools.permutations(by_len[l]) for l in sorted(by_len))
750
+ )
751
+
752
@classmethod
def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
    """
    Given a partition of nodes in graph, make the partitions smaller such
    that all nodes in a partition have 1) the same color, and 2) the same
    number of edges to specific other partitions.

    Yields one or more refined partition lists. More than one is yielded
    only when ``branch`` is True and a cell splits into multiple new cells
    of equal size, whose relative order is then ambiguous.
    """

    def equal_color(node1, node2):
        # Two nodes are equivalent iff their combined
        # (node color, edge-color-count) signatures agree.
        return node_edge_colors[node1] == node_edge_colors[node2]

    node_partitions = list(node_partitions)
    node_colors = partition_to_color(node_partitions)
    node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
    if all(
        are_all_equal(node_edge_colors[node] for node in partition)
        for partition in node_partitions
    ):
        # Every cell is already homogeneous: nothing to refine.
        yield node_partitions
        return

    new_partitions = []
    output = [new_partitions]
    for partition in node_partitions:
        if not are_all_equal(node_edge_colors[node] for node in partition):
            refined = make_partitions(partition, equal_color)
            if (
                branch
                and len(refined) != 1
                # True iff at least two of the new cells share a length.
                and len({len(r) for r in refined}) != len([len(r) for r in refined])
            ):
                # This is where it breaks. There are multiple new cells
                # in refined with the same length, and their order
                # matters.
                # So option 1) Hit it with a big hammer and simply make all
                # orderings.
                # NOTE(review): only `permutation[0]` (the first length
                # group) is used, and `permutations` is a generator that is
                # exhausted after the first `n_p` — verify against upstream.
                permutations = cls._get_permutations_by_length(refined)
                new_output = []
                for n_p in output:
                    for permutation in permutations:
                        new_output.append(n_p + list(permutation[0]))
                output = new_output
            else:
                for n_p in output:
                    n_p.extend(sorted(refined, key=len))
        else:
            # Cell already homogeneous; keep it as-is in every ordering.
            for n_p in output:
                n_p.append(partition)
    for n_p in output:
        # Recurse: splitting one cell may make other cells inhomogeneous.
        yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
+ def _edges_of_same_color(self, sgn1, sgn2):
804
+ """
805
+ Returns all edges in :attr:`graph` that have the same colour as the
806
+ edge between sgn1 and sgn2 in :attr:`subgraph`.
807
+ """
808
+ if (sgn1, sgn2) in self._sge_colors:
809
+ # FIXME directed graphs
810
+ sge_color = self._sge_colors[sgn1, sgn2]
811
+ else:
812
+ sge_color = self._sge_colors[sgn2, sgn1]
813
+ if sge_color in self._edge_compatibility:
814
+ ge_color = self._edge_compatibility[sge_color]
815
+ g_edges = self._ge_partitions[ge_color]
816
+ else:
817
+ g_edges = []
818
+ return g_edges
819
+
820
def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
    """
    Find all subgraph isomorphisms honoring constraints.

    Parameters
    ----------
    sgn
        The subgraph node to assign next.
    candidates : dict
        Maps each subgraph node to a frozenset of frozensets of graph
        nodes it may still be mapped to.
    constraints : collection of (lower, higher) subgraph-node pairs
        Symmetry-breaking constraints: ``lower`` must be mapped to a
        smaller graph node than ``higher``.
    mapping : dict or None
        Partial subgraph-node -> graph-node assignment built so far.
    to_be_mapped : set or None
        Subgraph nodes that must end up in the mapping; defaults to all
        subgraph nodes.

    Yields
    ------
    dict
        Complete mappings, keyed graph node -> subgraph node (note the
        reversal relative to ``mapping``).
    """
    if mapping is None:
        mapping = {}
    else:
        # Copy so recursive siblings don't see each other's assignments.
        mapping = mapping.copy()
    if to_be_mapped is None:
        to_be_mapped = set(self.subgraph.nodes)

    # Note, we modify candidates here. Doesn't seem to affect results, but
    # remember this.
    # candidates = candidates.copy()
    sgn_candidates = intersect(candidates[sgn])
    candidates[sgn] = frozenset([sgn_candidates])
    for gn in sgn_candidates:
        # We're going to try to map sgn to gn.
        if gn in mapping.values() or sgn not in to_be_mapped:
            # gn is already mapped to something
            continue  # pragma: no cover

        # REDUCTION and COMBINATION
        mapping[sgn] = gn
        # BASECASE
        if to_be_mapped == set(mapping.keys()):
            yield {v: k for k, v in mapping.items()}
            continue
        left_to_map = to_be_mapped - set(mapping.keys())

        new_candidates = candidates.copy()
        sgn_nbrs = set(self.subgraph[sgn])
        not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
        for sgn2 in left_to_map:
            if sgn2 not in sgn_nbrs:
                # Non-neighbors of sgn must map to non-neighbors of gn.
                gn2_options = not_gn_nbrs
            else:
                # Get all edges to gn of the right color:
                g_edges = self._edges_of_same_color(sgn, sgn2)
                # FIXME directed graphs
                # And all nodes involved in those which are connected to gn
                gn2_options = {n for e in g_edges for n in e if gn in e}
            # Node color compatibility should be taken care of by the
            # initial candidate lists made by find_subgraphs

            # Add gn2_options to the right collection. Since new_candidates
            # is a dict of frozensets of frozensets of node indices it's
            # a bit clunky. We can't do .add, and + also doesn't work. We
            # could do |, but I deem union to be clearer.
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

            # Apply the symmetry-breaking constraints relative to gn.
            if (sgn, sgn2) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
            elif (sgn2, sgn) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
            else:
                continue  # pragma: no cover
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

        # The next node is the one that is unmapped and has fewest
        # candidates
        next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
        yield from self._map_nodes(
            next_sgn,
            new_candidates,
            constraints,
            mapping=mapping,
            to_be_mapped=to_be_mapped,
        )
    # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
    # when making a new mapping for sgn.
    # del mapping[sgn]
def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
    """
    Find all largest common subgraphs honoring constraints.

    Yields mappings (graph node -> subgraph node, via :meth:`_map_nodes`)
    for every largest common subgraph found. Recursively shrinks the
    candidate subgraph node sets until at least one isomorphism exists.
    """
    if to_be_mapped is None:
        to_be_mapped = {frozenset(self.subgraph.nodes)}

    # The LCS problem is basically a repeated subgraph isomorphism problem
    # with smaller and smaller subgraphs. We store the nodes that are
    # "part of" the subgraph in to_be_mapped, and we make it a little
    # smaller every iteration.

    current_size = len(next(iter(to_be_mapped), []))

    found_iso = False
    if current_size <= len(self.graph):
        # There's no point in trying to find isomorphisms of
        # graph >= subgraph if subgraph has more nodes than graph.

        # Try the isomorphism first with the nodes with lowest ID. So sort
        # them. Those are more likely to be part of the final
        # correspondence. This makes finding the first answer(s) faster. In
        # theory.
        for nodes in sorted(to_be_mapped, key=sorted):
            # Find the isomorphism between subgraph[to_be_mapped] <= graph
            next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
            isomorphs = self._map_nodes(
                next_sgn, candidates, constraints, to_be_mapped=nodes
            )

            # This is effectively `yield from isomorphs`, except that we look
            # whether an item was yielded.
            try:
                item = next(isomorphs)
            except StopIteration:
                pass
            else:
                yield item
                yield from isomorphs
                found_iso = True

    # BASECASE
    if found_iso or current_size == 1:
        # Shrinking has no point because either 1) we end up with a smaller
        # common subgraph (and we want the largest), or 2) there'll be no
        # more subgraph.
        return

    left_to_be_mapped = set()
    for nodes in to_be_mapped:
        for sgn in nodes:
            # We're going to remove sgn from to_be_mapped, but subject to
            # symmetry constraints. We know that for every constraint we
            # have those subgraph nodes are equal. So whenever we would
            # remove the lower part of a constraint, remove the higher
            # instead. This is all dealt with by _remove_node. And because
            # left_to_be_mapped is a set, we don't do double work.

            # And finally, make the subgraph one node smaller.
            # REDUCTION
            new_nodes = self._remove_node(sgn, nodes, constraints)
            left_to_be_mapped.add(new_nodes)
    # COMBINATION
    yield from self._largest_common_subgraph(
        candidates, constraints, to_be_mapped=left_to_be_mapped
    )
+ @staticmethod
965
+ def _remove_node(node, nodes, constraints):
966
+ """
967
+ Returns a new set where node has been removed from nodes, subject to
968
+ symmetry constraints. We know, that for every constraint we have
969
+ those subgraph nodes are equal. So whenever we would remove the
970
+ lower part of a constraint, remove the higher instead.
971
+ """
972
+ while True:
973
+ for low, high in constraints:
974
+ if low == node and high in nodes:
975
+ node = high
976
+ break
977
+ else: # no break, couldn't find node in constraints
978
+ break
979
+ return frozenset(nodes - {node})
980
+
981
+ @staticmethod
982
+ def _find_permutations(top_partitions, bottom_partitions):
983
+ """
984
+ Return the pairs of top/bottom partitions where the partitions are
985
+ different. Ensures that all partitions in both top and bottom
986
+ partitions have size 1.
987
+ """
988
+ # Find permutations
989
+ permutations = set()
990
+ for top, bot in zip(top_partitions, bottom_partitions):
991
+ # top and bot have only one element
992
+ if len(top) != 1 or len(bot) != 1:
993
+ raise IndexError(
994
+ "Not all nodes are coupled. This is"
995
+ f" impossible: {top_partitions}, {bottom_partitions}"
996
+ )
997
+ if top != bot:
998
+ permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
999
+ return permutations
1000
+
1001
+ @staticmethod
1002
+ def _update_orbits(orbits, permutations):
1003
+ """
1004
+ Update orbits based on permutations. Orbits is modified in place.
1005
+ For every pair of items in permutations their respective orbits are
1006
+ merged.
1007
+ """
1008
+ for permutation in permutations:
1009
+ node, node2 = permutation
1010
+ # Find the orbits that contain node and node2, and replace the
1011
+ # orbit containing node with the union
1012
+ first = second = None
1013
+ for idx, orbit in enumerate(orbits):
1014
+ if first is not None and second is not None:
1015
+ break
1016
+ if node in orbit:
1017
+ first = idx
1018
+ if node2 in orbit:
1019
+ second = idx
1020
+ if first != second:
1021
+ orbits[first].update(orbits[second])
1022
+ del orbits[second]
1023
+
1024
def _couple_nodes(
    self,
    top_partitions,
    bottom_partitions,
    pair_idx,
    t_node,
    b_node,
    graph,
    edge_colors,
):
    """
    Generate new partitions from top and bottom_partitions where t_node is
    coupled to b_node. pair_idx is the index of the partitions where t_ and
    b_node can be found.

    Yields (top, bottom) partition-list pairs: one pair per refinement of
    the bottom partitions. The top refinement is run without branching and
    therefore must be unambiguous (exactly one result).
    """
    t_partition = top_partitions[pair_idx]
    b_partition = bottom_partitions[pair_idx]
    assert t_node in t_partition and b_node in b_partition
    # Couple node to node2. This means they get their own partition
    new_top_partitions = [top.copy() for top in top_partitions]
    new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
    new_t_groups = {t_node}, t_partition - {t_node}
    new_b_groups = {b_node}, b_partition - {b_node}
    # Replace the old partitions with the coupled ones
    del new_top_partitions[pair_idx]
    del new_bottom_partitions[pair_idx]
    new_top_partitions[pair_idx:pair_idx] = new_t_groups
    new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

    new_top_partitions = self._refine_node_partitions(
        graph, new_top_partitions, edge_colors
    )
    new_bottom_partitions = self._refine_node_partitions(
        graph, new_bottom_partitions, edge_colors, branch=True
    )
    new_top_partitions = list(new_top_partitions)
    # Without branch=True the refinement is deterministic, so there can be
    # only one refined top partition.
    assert len(new_top_partitions) == 1
    new_top_partitions = new_top_partitions[0]
    for bot in new_bottom_partitions:
        # Fresh list per yield so callers can mutate independently.
        yield list(new_top_partitions), bot
def _process_ordered_pair_partitions(
    self,
    graph,
    top_partitions,
    bottom_partitions,
    edge_colors,
    orbits=None,
    cosets=None,
):
    """
    Processes ordered pair partitions as per the reference paper. Finds and
    returns all permutations and cosets that leave the graph unchanged.

    Returns
    -------
    tuple
        ``(permutations, cosets)`` where ``permutations`` is a list of
        sets of frozenset node pairs, and ``cosets`` maps a node to a
        copy of its orbit at the time all smaller nodes were mapped.
    """
    if orbits is None:
        orbits = [{node} for node in graph.nodes]
    else:
        # Note that we don't copy orbits when we are given one. This means
        # we leak information between the recursive branches. This is
        # intentional!
        orbits = orbits
    if cosets is None:
        cosets = {}
    else:
        cosets = cosets.copy()

    # Top and bottom partition lists must stay in lock-step.
    assert all(
        len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
    )

    # BASECASE
    if all(len(top) == 1 for top in top_partitions):
        # All nodes are mapped
        permutations = self._find_permutations(top_partitions, bottom_partitions)
        self._update_orbits(orbits, permutations)
        if permutations:
            return [permutations], cosets
        else:
            return [], cosets

    permutations = []
    # All nodes that sit in a cell that is not yet a singleton.
    unmapped_nodes = {
        (node, idx)
        for idx, t_partition in enumerate(top_partitions)
        for node in t_partition
        if len(t_partition) > 1
    }
    node, pair_idx = min(unmapped_nodes)
    b_partition = bottom_partitions[pair_idx]

    for node2 in sorted(b_partition):
        if len(b_partition) == 1:
            # Can never result in symmetry
            continue
        if node != node2 and any(
            node in orbit and node2 in orbit for orbit in orbits
        ):
            # Orbit prune branch
            continue
        # REDUCTION
        # Couple node to node2
        partitions = self._couple_nodes(
            top_partitions,
            bottom_partitions,
            pair_idx,
            node,
            node2,
            graph,
            edge_colors,
        )
        for opp in partitions:
            new_top_partitions, new_bottom_partitions = opp

            new_perms, new_cosets = self._process_ordered_pair_partitions(
                graph,
                new_top_partitions,
                new_bottom_partitions,
                edge_colors,
                orbits,
                cosets,
            )
            # COMBINATION
            permutations += new_perms
            cosets.update(new_cosets)

    # Nodes that are pinned: singleton cells equal in top and bottom.
    mapped = {
        k
        for top, bottom in zip(top_partitions, bottom_partitions)
        for k in top
        if len(top) == 1 and top == bottom
    }
    ks = {k for k in graph.nodes if k < node}
    # Have all nodes with ID < node been mapped?
    find_coset = ks <= mapped and node not in cosets
    if find_coset:
        # Find the orbit that contains node
        for orbit in orbits:
            if node in orbit:
                cosets[node] = orbit.copy()
    return permutations, cosets
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Graph isomorphism functions.
3
+ """
4
+ import networkx as nx
5
+ from networkx.exception import NetworkXError
6
+
7
+ __all__ = [
8
+ "could_be_isomorphic",
9
+ "fast_could_be_isomorphic",
10
+ "faster_could_be_isomorphic",
11
+ "is_isomorphic",
12
+ ]
13
+
14
+
15
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    The triangle sequence contains the number of triangles each node is part of.
    The clique sequence contains for each node the number of maximal cliques
    involving that node.

    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    def local_properties(G):
        # Sorted per-node [degree, #triangles, #maximal cliques] triples.
        triangles = nx.triangles(G)
        cliques = list(nx.find_cliques(G))
        clique_counts = {n: sum(n in clique for clique in cliques) for n in G}
        return sorted([d, triangles[v], clique_counts[v]] for v, d in G.degree())

    # Identical invariant sequences are necessary (not sufficient) for
    # isomorphism.
    return local_properties(G1) == local_properties(G2)


graph_could_be_isomorphic = could_be_isomorphic
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree and triangle sequences. The triangle
    sequence contains the number of triangles each node is part of.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    def degree_triangle_sequence(G):
        # Sorted per-node [degree, #triangles] pairs.
        triangles = nx.triangles(G)
        return sorted([d, triangles[v]] for v, d in G.degree())

    # Identical invariant sequences are necessary (not sufficient) for
    # isomorphism.
    return degree_triangle_sequence(G1) == degree_triangle_sequence(G2)


fast_graph_could_be_isomorphic = fast_could_be_isomorphic
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree sequences.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    # Matching sorted degree sequences are necessary (not sufficient) for
    # isomorphism.
    return sorted(d for _, d in G1.degree()) == sorted(d for _, d in G2.degree())


faster_graph_could_be_isomorphic = faster_could_be_isomorphic
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test. If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop on Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Mixing a directed with an undirected graph is not supported.
    if G1.is_directed() != G2.is_directed():
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    # Both graphs agree on directedness; pick the matching matcher class.
    matcher_cls = (
        nx.algorithms.isomorphism.DiGraphMatcher
        if G1.is_directed()
        else nx.algorithms.isomorphism.GraphMatcher
    )
    gm = matcher_cls(G1, G2, node_match=node_match, edge_match=edge_match)
    return gm.is_isomorphic()
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py ADDED
@@ -0,0 +1,1065 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *************
3
+ VF2 Algorithm
4
+ *************
5
+
6
+ An implementation of VF2 algorithm for graph isomorphism testing.
7
+
8
+ The simplest interface to use this module is to call the
9
+ :func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
10
+ function.
11
+
12
+ Introduction
13
+ ------------
14
+
15
+ The GraphMatcher and DiGraphMatcher are responsible for matching
16
+ graphs or directed graphs in a predetermined manner. This
17
+ usually means a check for an isomorphism, though other checks
18
+ are also possible. For example, a subgraph of one graph
19
+ can be checked for isomorphism to a second graph.
20
+
21
+ Matching is done via syntactic feasibility. It is also possible
22
+ to check for semantic feasibility. Feasibility, then, is defined
23
+ as the logical AND of the two functions.
24
+
25
+ To include a semantic check, the (Di)GraphMatcher class should be
26
+ subclassed, and the
27
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
28
+ function should be redefined. By default, the semantic feasibility function always
29
+ returns ``True``. The effect of this is that semantics are not
30
+ considered in the matching of G1 and G2.
31
+
32
+ Examples
33
+ --------
34
+
35
+ Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
36
+
37
+ >>> from networkx.algorithms import isomorphism
38
+ >>> G1 = nx.path_graph(4)
39
+ >>> G2 = nx.path_graph(4)
40
+ >>> GM = isomorphism.GraphMatcher(G1, G2)
41
+ >>> GM.is_isomorphic()
42
+ True
43
+
44
+ GM.mapping stores the isomorphism mapping from G1 to G2.
45
+
46
+ >>> GM.mapping
47
+ {0: 0, 1: 1, 2: 2, 3: 3}
48
+
49
+
50
+ Suppose G1 and G2 are isomorphic directed graphs.
51
+ Verification is as follows:
52
+
53
+ >>> G1 = nx.path_graph(4, create_using=nx.DiGraph())
54
+ >>> G2 = nx.path_graph(4, create_using=nx.DiGraph())
55
+ >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
56
+ >>> DiGM.is_isomorphic()
57
+ True
58
+
59
+ DiGM.mapping stores the isomorphism mapping from G1 to G2.
60
+
61
+ >>> DiGM.mapping
62
+ {0: 0, 1: 1, 2: 2, 3: 3}
63
+
64
+
65
+
66
+ Subgraph Isomorphism
67
+ --------------------
68
+ Graph theory literature can be ambiguous about the meaning of the
69
+ above statement, and we seek to clarify it now.
70
+
71
+ In the VF2 literature, a mapping `M` is said to be a graph-subgraph
72
+ isomorphism iff `M` is an isomorphism between `G2` and a subgraph of `G1`.
73
+ Thus, to say that `G1` and `G2` are graph-subgraph isomorphic is to say
74
+ that a subgraph of `G1` is isomorphic to `G2`.
75
+
76
+ Other literature uses the phrase 'subgraph isomorphic' as in '`G1` does
77
+ not have a subgraph isomorphic to `G2`'. Another use is as an adverb
78
+ for isomorphic. Thus, to say that `G1` and `G2` are subgraph isomorphic
79
+ is to say that a subgraph of `G1` is isomorphic to `G2`.
80
+
81
+ Finally, the term 'subgraph' can have multiple meanings. In this
82
+ context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
83
+ subgraph isomorphisms are not directly supported, but one should be
84
+ able to perform the check by making use of
85
+ :func:`line_graph <networkx.generators.line.line_graph>`. For
86
+ subgraphs which are not induced, the term 'monomorphism' is preferred
87
+ over 'isomorphism'.
88
+
89
+ Let ``G = (N, E)`` be a graph with a set of nodes `N` and set of edges `E`.
90
+
91
+ If ``G' = (N', E')`` is a subgraph, then:
92
+ `N'` is a subset of `N` and
93
+ `E'` is a subset of `E`.
94
+
95
+ If ``G' = (N', E')`` is a node-induced subgraph, then:
96
+ `N'` is a subset of `N` and
97
+ `E'` is the subset of edges in `E` relating nodes in `N'`.
98
+
99
+ If `G' = (N', E')` is an edge-induced subgraph, then:
100
+ `N'` is the subset of nodes in `N` related by edges in `E'` and
101
+ `E'` is a subset of `E`.
102
+
103
+ If `G' = (N', E')` is a monomorphism, then:
104
+ `N'` is a subset of `N` and
105
+ `E'` is a subset of the set of edges in `E` relating nodes in `N'`.
106
+
107
+ Note that if `G'` is a node-induced subgraph of `G`, then it is always a
108
+ subgraph monomorphism of `G`, but the opposite is not always true, as a
109
+ monomorphism can have fewer edges.
110
+
111
+ References
112
+ ----------
113
+ [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
114
+ "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
115
+ IEEE Transactions on Pattern Analysis and Machine Intelligence,
116
+ vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
117
+ http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
118
+
119
+ [2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
120
+ Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
121
+ on Graph-based Representations in Pattern Recognition, Cuen,
122
+ pp. 149-159, 2001.
123
+ https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
124
+
125
+ See Also
126
+ --------
127
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
128
+ :meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
129
+
130
+ Notes
131
+ -----
132
+
133
+ The implementation handles both directed and undirected graphs as well
134
+ as multigraphs.
135
+
136
+ In general, the subgraph isomorphism problem is NP-complete whereas the
137
+ graph isomorphism problem is most likely not NP-complete (although no
138
+ polynomial-time algorithm is known to exist).
139
+
140
+ """
141
+
142
+ # This work was originally coded by Christopher Ellison
143
+ # as part of the Computational Mechanics Python (CMPy) project.
144
+ # James P. Crutchfield, principal investigator.
145
+ # Complexity Sciences Center and Physics Department, UC Davis.
146
+
147
+ import sys
148
+
149
+ __all__ = ["GraphMatcher", "DiGraphMatcher"]
150
+
151
+
152
class GraphMatcher:
    """Implementation of VF2 algorithm for matching undirected graphs.

    Suitable for Graph and MultiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize GraphMatcher.

        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = isomorphism.GraphMatcher(G1, G2)
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())
        # Fixed ordering of G2's nodes; used to pick a deterministic
        # candidate node in candidate_pairs_iter().
        self.G2_node_order = {n: i for i, n in enumerate(G2)}

        # Set recursion limit.
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))

        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = "graph"

        # Initialize state
        self.initialize()

    def reset_recursion_limit(self):
        """Restores the recursion limit."""
        # TODO:
        # Currently, we use recursion and set the recursion level higher.
        # It would be nice to restore the level, but because the
        # (Di)GraphMatcher classes make use of cyclic references, garbage
        # collection will never happen when we define __del__() to
        # restore the recursion level. The result is a memory leak.
        # So for now, we do not automatically restore the recursion level,
        # and instead provide a method to do this manually. Eventually,
        # we should turn this into a non-recursive implementation.
        sys.setrecursionlimit(self.old_recursion_limit)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the inout-terminal sets.
        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
        T2_inout = [node for node in self.inout_2 if node not in self.core_2]

        # If T1_inout and T2_inout are both nonempty.
        # P(s) = T1_inout x {min T2_inout}
        if T1_inout and T2_inout:
            node_2 = min(T2_inout, key=min_key)
            for node_1 in T1_inout:
                yield node_1, node_2
        else:
            # If T1_inout or T2_inout is empty:
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            # NOTE: [2] suggests this case only when *both* terminal sets are
            # empty, which is incorrect; as inferred from [1] it must also be
            # used when exactly one of them is empty, so no guard is needed.
            # First we determine the candidate node for G2
            other_node = min(G2_nodes - set(self.core_2), key=min_key)
            for node in self.G1:
                if node not in self.core_1:
                    yield node, other_node

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than GMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout}
        # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout}
        #
        # The value stored is the depth of the SSR tree when the node became
        # part of the corresponding set.
        self.inout_1 = {}
        self.inout_2 = {}
        # Practically, these sets simply store the nodes in the subgraph.

        self.state = GMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def is_isomorphic(self):
        """Returns True if G1 and G2 are isomorphic graphs."""

        # Let's do two very quick checks!
        # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
        # For now, I just copy the code.

        # Check global properties
        if self.G1.order() != self.G2.order():
            return False

        # Check local properties
        d1 = sorted(d for n, d in self.G1.degree())
        d2 = sorted(d for n, d in self.G2.degree())
        if d1 != d2:
            return False

        # One successfully completed mapping is enough; the sentinel form
        # avoids the try/except StopIteration boilerplate.
        return next(self.isomorphisms_iter(), None) is not None

    def isomorphisms_iter(self):
        """Generator over isomorphisms between G1 and G2."""
        # Declare that we are looking for a graph-graph isomorphism.
        self.test = "graph"
        self.initialize()
        yield from self.match()

    def match(self):
        """Extends the isomorphism mapping.

        This function is called recursively to determine if a complete
        isomorphism can be found between G1 and G2. It cleans up the class
        variables after each recursive call. If an isomorphism is found,
        we yield the mapping.
        """
        if len(self.core_1) == len(self.G2):
            # Save the final mapping, otherwise garbage collection deletes it.
            self.mapping = self.core_1.copy()
            # The mapping is complete.
            yield self.mapping
        else:
            for G1_node, G2_node in self.candidate_pairs_iter():
                if self.syntactic_feasibility(G1_node, G2_node):
                    if self.semantic_feasibility(G1_node, G2_node):
                        # Recursive call, adding the feasible state.
                        newstate = self.state.__class__(self, G1_node, G2_node)
                        yield from self.match()

                        # restore data structures
                        newstate.restore()

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically feasible.

        The semantic feasibility function should return True if it is
        acceptable to add the candidate pair (G1_node, G2_node) to the current
        partial isomorphism mapping. The logic should focus on semantic
        information contained in the edge data or a formalized node class.

        By acceptable, we mean that the subsequent mapping can still become a
        complete isomorphism mapping. Thus, if adding the candidate pair
        definitely makes it so that the subsequent mapping cannot become a
        complete isomorphism mapping, then this function must return False.

        The default semantic feasibility function always returns True. The
        effect is that semantics are not considered in the matching of G1
        and G2.

        The semantic checks might differ based on the what type of test is
        being performed. A keyword description of the test is stored in
        self.test. Here is a quick description of the currently implemented
        tests::

            test='graph'
                Indicates that the graph matcher is looking for a graph-graph
                isomorphism.

            test='subgraph'
                Indicates that the graph matcher is looking for a subgraph-graph
                isomorphism such that a subgraph of G1 is isomorphic to G2.

            test='mono'
                Indicates that the graph matcher is looking for a subgraph-graph
                monomorphism such that a subgraph of G1 is monomorphic to G2.

        Any subclass which redefines semantic_feasibility() must maintain
        the above form to keep the match() method functional. Implementations
        should consider multigraphs.
        """
        return True

    def subgraph_is_isomorphic(self):
        """Returns True if a subgraph of G1 is isomorphic to G2."""
        return next(self.subgraph_isomorphisms_iter(), None) is not None

    def subgraph_is_monomorphic(self):
        """Returns True if a subgraph of G1 is monomorphic to G2."""
        return next(self.subgraph_monomorphisms_iter(), None) is not None

    def subgraph_isomorphisms_iter(self):
        """Generator over isomorphisms between a subgraph of G1 and G2."""
        # Declare that we are looking for graph-subgraph isomorphism.
        self.test = "subgraph"
        self.initialize()
        yield from self.match()

    def subgraph_monomorphisms_iter(self):
        """Generator over monomorphisms between a subgraph of G1 and G2."""
        # Declare that we are looking for graph-subgraph monomorphism.
        self.test = "mono"
        self.initialize()
        yield from self.match()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if adding the candidate pair to the
        current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes. This is not the case when
        # dealing with an MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_neighbor, we will
        # make sure that the number of edges are checked. We also add
        # a R_self check to verify that the number of selfloops is acceptable.
        #
        # Users might be comparing Graph instances with MultiGraph instances.
        # So the generic GraphMatcher class must work with MultiGraphs.
        # Care must be taken since the value in the innermost dictionary is a
        # singlet for Graph instances. For MultiGraphs, the value in the
        # innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on
        # R_neighbor at the next recursion level. But it is good to prune the
        # search tree now.

        if self.test == "mono":
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_neighbor

        # For each neighbor n' of n in the partial mapping, the corresponding
        # node m' is a neighbor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for neighbor in self.G1[G1_node]:
                if neighbor in self.core_1:
                    if self.core_1[neighbor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        neighbor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
                        return False

        for neighbor in self.G2[G2_node]:
            if neighbor in self.core_2:
                if self.core_2[neighbor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) < self.G2.number_of_edges(neighbor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) != self.G2.number_of_edges(neighbor, G2_node):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_terminout
            # The number of neighbors of n in T_1^{inout} is equal to the
            # number of neighbors of m that are in T_2^{inout}, and vice versa.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of neighbors of n that are neither in the core_1 nor
            # T_1^{inout} is equal to the number of neighbors of m
            # that are neither in core_2 nor T_2^{inout}.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if neighbor not in self.inout_1:
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if neighbor not in self.inout_2:
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True
+
523
+
524
class DiGraphMatcher(GraphMatcher):
    """Implementation of VF2 algorithm for matching directed graphs.

    Suitable for DiGraph and MultiDiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize DiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Examples
        --------
        To create a DiGraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
        """
        super().__init__(G1, G2)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the out-terminal sets.
        T1_out = [node for node in self.out_1 if node not in self.core_1]
        T2_out = [node for node in self.out_2 if node not in self.core_2]

        # If T1_out and T2_out are both nonempty.
        # P(s) = T1_out x {min T2_out}
        if T1_out and T2_out:
            node_2 = min(T2_out, key=min_key)
            for node_1 in T1_out:
                yield node_1, node_2

        # If T1_out and T2_out were both empty....
        # We compute the in-terminal sets.

        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
        else:  # as suggested by [1], correct
            T1_in = [node for node in self.in_1 if node not in self.core_1]
            T2_in = [node for node in self.in_2 if node not in self.core_2]

            # If T1_in and T2_in are both nonempty.
            # P(s) = T1_in x {min T2_in}
            if T1_in and T2_in:
                node_2 = min(T2_in, key=min_key)
                for node_1 in T1_in:
                    yield node_1, node_2

            # If all terminal sets are empty...
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}

            # elif not (T1_in or T2_in):   # as suggested by [2], incorrect
            else:  # as inferred from [1], correct
                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
                for node_1 in G1_nodes:
                    if node_1 not in self.core_1:
                        yield node_1, node_2

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than DiGMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
        #
        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
        #
        # The value stored is the depth of the search tree when the node became
        # part of the corresponding set.
        self.in_1 = {}
        self.in_2 = {}
        self.out_1 = {}
        self.out_2 = {}

        self.state = DiGMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if adding the candidate pair to the
        current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes. This is not the case when
        # dealing with an MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_pred and R_succ, we
        # will make sure that the number of edges are checked. We also add
        # a R_self check to verify that the number of selfloops is acceptable.

        # Users might be comparing DiGraph instances with MultiDiGraph
        # instances. So the generic DiGraphMatcher class must work with
        # MultiDiGraphs. Care must be taken since the value in the innermost
        # dictionary is a singlet for DiGraph instances. For MultiDiGraphs,
        # the value in the innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on R_pred
        # at the next recursion level. This should prune the tree even further.
        if self.test == "mono":
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_pred

        # For each predecessor n' of n in the partial mapping, the
        # corresponding node m' is a predecessor of m, and vice versa. Also,
        # the number of edges must be equal
        if self.test != "mono":
            for predecessor in self.G1.pred[G1_node]:
                if predecessor in self.core_1:
                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        predecessor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
                        return False

        for predecessor in self.G2.pred[G2_node]:
            if predecessor in self.core_2:
                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) < self.G2.number_of_edges(predecessor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) != self.G2.number_of_edges(predecessor, G2_node):
                        return False

        # R_succ

        # For each successor n' of n in the partial mapping, the corresponding
        # node m' is a successor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for successor in self.G1[G1_node]:
                if successor in self.core_1:
                    if self.core_1[successor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        G1_node, successor
                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
                        return False

        for successor in self.G2[G2_node]:
            if successor in self.core_2:
                if self.core_2[successor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) < self.G2.number_of_edges(G2_node, successor):
                        return False
                else:
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) != self.G2.number_of_edges(G2_node, successor):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_termin
            # The number of predecessors of n that are in T_1^{in} is equal to the
            # number of predecessors of m that are in T_2^{in}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.in_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.in_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{in} is equal to the
            # number of successors of m that are in T_2^{in}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.in_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.in_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # R_termout

            # The number of predecessors of n that are in T_1^{out} is equal to the
            # number of predecessors of m that are in T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.out_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.out_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{out} is equal to the
            # number of successors of m that are in T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.out_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.out_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of predecessors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor not in self.in_1) and (successor not in self.out_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor not in self.in_2) and (successor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True
848
+
849
+
850
class GMState:
    """Internal representation of state for the GraphMatcher class.

    This class is used internally by the GraphMatcher class. It is used
    only to store state specific data. There will be at most G2.order() of
    these objects in memory at a time, due to the depth-first search
    strategy employed by the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes GMState object.

        Pass in the GraphMatcher to which this GMState belongs and the
        new node pair that will be added to the GraphMatcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # The pair pushed by this state; stays None for a reset state.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: wipe all of the matcher's search vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Watch out! G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Extend the partial mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node

            # Remember which pair this state added so restore() can undo it.
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Entries added below are tagged with the new search depth.
            self.depth = len(GM.core_1)

            # The newly mapped nodes join the inout vectors (unless present).
            GM.inout_1.setdefault(G1_node, self.depth)
            GM.inout_2.setdefault(G2_node, self.depth)

            # T_1^{inout}: unmapped neighbors of mapped G1 nodes.
            frontier_1 = {
                nbr
                for mapped in GM.core_1
                for nbr in GM.G1[mapped]
                if nbr not in GM.core_1
            }
            for nbr in frontier_1:
                GM.inout_1.setdefault(nbr, self.depth)

            # T_2^{inout}: unmapped neighbors of mapped G2 nodes.
            frontier_2 = {
                nbr
                for mapped in GM.core_2
                for nbr in GM.G2[mapped]
                if nbr not in GM.core_2
            }
            for nbr in frontier_2:
                GM.inout_2.setdefault(nbr, self.depth)

    def restore(self):
        """Deletes the GMState object and restores the class variables."""
        # Remove the pair this state added from the core mappings.
        # Watch out! G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every inout entry recorded at this state's depth.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            stale = [node for node, depth in vector.items() if depth == self.depth]
            for node in stale:
                del vector[node]
936
+
937
+
938
class DiGMState:
    """Internal representation of state for the DiGraphMatcher class.

    This class is used internally by the DiGraphMatcher class. It is used
    only to store state specific data. There will be at most G2.order() of
    these objects in memory at a time, due to the depth-first search
    strategy employed by the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes DiGMState object.

        Pass in the DiGraphMatcher to which this DiGMState belongs and the
        new node pair that will be added to the GraphMatcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # The pair pushed by this state; stays None for a reset state.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: wipe all of the matcher's search vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Watch out! G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Extend the partial mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node

            # Remember which pair this state added so restore() can undo it.
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Entries added below are tagged with the new search depth.
            self.depth = len(GM.core_1)

            # The newly mapped nodes join every terminal vector they are
            # not already in.
            for vector in (GM.in_1, GM.out_1):
                vector.setdefault(G1_node, self.depth)
            for vector in (GM.in_2, GM.out_2):
                vector.setdefault(G2_node, self.depth)

            # Unmapped predecessors of mapped nodes form T^{in}; unmapped
            # successors form T^{out}. Handle all four vectors uniformly.
            updates = (
                (GM.in_1, GM.G1.predecessors, GM.core_1),
                (GM.in_2, GM.G2.predecessors, GM.core_2),
                (GM.out_1, GM.G1.successors, GM.core_1),
                (GM.out_2, GM.G2.successors, GM.core_2),
            )
            for vector, adjacent, core in updates:
                frontier = {
                    nbr
                    for mapped in core
                    for nbr in adjacent(mapped)
                    if nbr not in core
                }
                for nbr in frontier:
                    vector.setdefault(nbr, self.depth)

    def restore(self):
        """Deletes the DiGMState object and restores the class variables."""
        # Remove the pair this state added from the core mappings.
        # Watch out! G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every terminal-vector entry recorded at this state's depth.
        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
            stale = [node for node, depth in vector.items() if depth == self.depth]
            for node in stale:
                del vector[node]
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py ADDED
@@ -0,0 +1,351 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions which help end users define customize node_match and
2
+ edge_match functions to use during isomorphism checks.
3
+ """
4
+ import math
5
+ import types
6
+ from itertools import permutations
7
+
8
+ __all__ = [
9
+ "categorical_node_match",
10
+ "categorical_edge_match",
11
+ "categorical_multiedge_match",
12
+ "numerical_node_match",
13
+ "numerical_edge_match",
14
+ "numerical_multiedge_match",
15
+ "generic_node_match",
16
+ "generic_edge_match",
17
+ "generic_multiedge_match",
18
+ ]
19
+
20
+
21
def copyfunc(f, name=None):
    """Return a copy of function *f*, optionally renamed to *name*."""
    new_name = name or f.__name__
    return types.FunctionType(
        f.__code__,
        f.__globals__,
        new_name,
        f.__defaults__,
        f.__closure__,
    )
26
+
27
+
28
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
    """Returns True if x and y are sufficiently close, elementwise.

    Parameters
    ----------
    rtol : float
        The relative error tolerance.
    atol : float
        The absolute error tolerance.

    """
    # Finite weights are assumed; see numpy.allclose() for reference.
    # Pairs are compared with math.isclose; zip stops at the shorter input.
    for xi, yi in zip(x, y):
        if not math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol):
            return False
    return True
41
+
42
+
43
# Shared numpydoc template; assigned to the categorical matchers' __doc__
# attributes below (with "node" rewritten to "edge" where appropriate).
categorical_doc = """
Returns a comparison function for a categorical node attribute.

The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object. If the sets from
G1 and G2 are the same, then the constructed function returns True.

Parameters
----------
attr : string | list
    The categorical node attribute to compare, or a list of categorical
    node attributes to compare.
default : value | list
    The default value for the categorical node attribute, or a list of
    default values for the categorical node attributes.

Returns
-------
match : function
    The customized, categorical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match("size", 1)
>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])

"""
71
+
72
+
73
def categorical_node_match(attr, default):
    """Return a ``node_match`` callable comparing categorical attribute(s)."""
    if not isinstance(attr, str):
        # Multiple attributes: pre-pair each name with its default value.
        pairs = list(zip(attr, default))

        def match(data1, data2):
            for name, fallback in pairs:
                if data1.get(name, fallback) != data2.get(name, fallback):
                    return False
            return True

    else:

        def match(data1, data2):
            return data1.get(attr, default) == data2.get(attr, default)

    return match
86
+
87
+
88
# Edge data dictionaries have the same {attr: value} shape as node data
# dictionaries, so the edge matcher is just a renamed copy of the node one.
categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
89
+
90
+
91
def categorical_multiedge_match(attr, default):
    """Return an ``edge_match`` callable for multigraphs that compares the
    sets of categorical attribute values over all parallel edges."""
    if isinstance(attr, str):

        def match(datasets1, datasets2):
            seen1 = {d.get(attr, default) for d in datasets1.values()}
            seen2 = {d.get(attr, default) for d in datasets2.values()}
            return seen1 == seen2

    else:
        pairs = list(zip(attr, default))

        def match(datasets1, datasets2):
            seen1 = {
                tuple(d.get(name, fb) for name, fb in pairs)
                for d in datasets1.values()
            }
            seen2 = {
                tuple(d.get(name, fb) for name, fb in pairs)
                for d in datasets2.values()
            }
            return seen1 == seen2

    return match
114
+
115
+
116
# Docstrings for categorical functions.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
# The multiedge docstring is the edge docstring with the function name swapped.
tmpdoc = categorical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
categorical_multiedge_match.__doc__ = tmpdoc
122
+
123
+
124
# Shared numpydoc template; assigned to the numerical matchers' __doc__
# attributes below (with "node" rewritten to "edge" where appropriate).
numerical_doc = """
Returns a comparison function for a numerical node attribute.

The value(s) of the attr(s) must be numerical and sortable. If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.

Parameters
----------
attr : string | list
    The numerical node attribute to compare, or a list of numerical
    node attributes to compare.
default : value | list
    The default value for the numerical node attribute, or a list of
    default values for the numerical node attributes.
rtol : float
    The relative error tolerance.
atol : float
    The absolute error tolerance.

Returns
-------
match : function
    The customized, numerical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match("weight", 1.0)
>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])

"""
156
+
157
+
158
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    """Return a ``node_match`` callable comparing numerical attribute(s)
    within the given relative/absolute tolerances."""
    if not isinstance(attr, str):
        pairs = list(zip(attr, default))

        def match(data1, data2):
            left = [data1.get(name, fb) for name, fb in pairs]
            right = [data2.get(name, fb) for name, fb in pairs]
            return allclose(left, right, rtol=rtol, atol=atol)

    else:

        def match(data1, data2):
            a = data1.get(attr, default)
            b = data2.get(attr, default)
            return math.isclose(a, b, rel_tol=rtol, abs_tol=atol)

    return match
178
+
179
+
180
# Edge data dictionaries have the same {attr: value} shape as node data
# dictionaries, so the edge matcher is just a renamed copy of the node one.
numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
181
+
182
+
183
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    """Return an ``edge_match`` callable for multigraphs that compares the
    sorted numerical attribute values over all parallel edges within the
    given relative/absolute tolerances."""
    if isinstance(attr, str):

        def match(datasets1, datasets2):
            values1 = sorted(data.get(attr, default) for data in datasets1.values())
            values2 = sorted(data.get(attr, default) for data in datasets2.values())
            # Differing numbers of parallel edges can never match; without
            # this guard zip() would silently truncate the longer list and
            # could report a spurious match.
            if len(values1) != len(values2):
                return False
            return all(
                math.isclose(a, b, rel_tol=rtol, abs_tol=atol)
                for a, b in zip(values1, values2)
            )

    else:
        attrs = list(zip(attr, default))  # Python 3

        def match(datasets1, datasets2):
            values1 = sorted(
                tuple(data1.get(attr, d) for attr, d in attrs)
                for data1 in datasets1.values()
            )
            values2 = sorted(
                tuple(data2.get(attr, d) for attr, d in attrs)
                for data2 in datasets2.values()
            )
            # See the guard above: unequal edge counts cannot match.
            if len(values1) != len(values2):
                return False
            for xi, yi in zip(values1, values2):
                if not all(
                    math.isclose(a, b, rel_tol=rtol, abs_tol=atol)
                    for a, b in zip(xi, yi)
                ):
                    return False
            return True

    return match
212
+
213
+
214
# Docstrings for numerical functions.
numerical_node_match.__doc__ = numerical_doc
numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
# The multiedge docstring is the edge docstring with the function name swapped.
tmpdoc = numerical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
numerical_multiedge_match.__doc__ = tmpdoc
220
+
221
+
222
# Shared numpydoc template; assigned to generic_node_match / generic_edge_match
# below (generic_multiedge_match carries its own docstring).
generic_doc = """
Returns a comparison function for a generic attribute.

The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.

Parameters
----------
attr : string | list
    The node attribute to compare, or a list of node attributes
    to compare.
default : value | list
    The default value for the node attribute, or a list of
    default values for the node attributes.
op : callable | list
    The operator to use when comparing attribute values, or a list
    of operators to use when comparing values for each attribute.

Returns
-------
match : function
    The customized, generic `node_match` function.

Examples
--------
>>> from operator import eq
>>> from math import isclose
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match("weight", 1.0, isclose)
>>> nm = generic_node_match("color", "red", eq)
>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

"""
256
+
257
+
258
def generic_node_match(attr, default, op):
    """Return a ``node_match`` callable comparing attribute(s) with the
    supplied operator(s)."""
    if not isinstance(attr, str):
        # Multiple attributes: bundle each name with its default and operator.
        triples = list(zip(attr, default, op))

        def match(data1, data2):
            return all(
                compare(data1.get(name, fb), data2.get(name, fb))
                for name, fb, compare in triples
            )

    else:

        def match(data1, data2):
            return op(data1.get(attr, default), data2.get(attr, default))

    return match
275
+
276
+
277
# Edge data dictionaries have the same {attr: value} shape as node data
# dictionaries, so the edge matcher is just a renamed copy of the node one.
generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
278
+
279
+
280
def generic_multiedge_match(attr, default, op):
    """Returns a comparison function for a generic attribute.

    The value(s) of the attr(s) are compared using the specified
    operators. If all the attributes are equal, then the constructed
    function returns True. Potentially, the constructed edge_match
    function can be slow since it must verify that no isomorphism
    exists between the multiedges before it returns False.

    Parameters
    ----------
    attr : string | list
        The edge attribute to compare, or a list of edge attributes
        to compare.
    default : value | list
        The default value for the edge attribute, or a list of
        default values for the edge attributes.
    op : callable | list
        The operator to use when comparing attribute values, or a list
        of operators to use when comparing values for each attribute.

    Returns
    -------
    match : function
        The customized, generic `edge_match` function.

    Examples
    --------
    >>> from operator import eq
    >>> from math import isclose
    >>> from networkx.algorithms.isomorphism import generic_multiedge_match
    >>> nm = generic_multiedge_match("weight", 1.0, isclose)
    >>> nm = generic_multiedge_match("color", "red", eq)
    >>> nm = generic_multiedge_match(["weight", "color"], [1.0, "red"], [isclose, eq])

    """

    # This is slow, but generic.
    # We must test every possible isomorphism between the edges.
    if isinstance(attr, str):
        attr = [attr]
        default = [default]
        op = [op]
    attrs = list(zip(attr, default))  # Python 3

    def match(datasets1, datasets2):
        # Parallel-edge bundles of different sizes can never be isomorphic;
        # without this guard zip() below would silently truncate the longer
        # bundle and could report a spurious match.
        if len(datasets1) != len(datasets2):
            return False
        values1 = [
            tuple(data1.get(attr, d) for attr, d in attrs)
            for data1 in datasets1.values()
        ]
        values2 = [
            tuple(data2.get(attr, d) for attr, d in attrs)
            for data2 in datasets2.values()
        ]
        for vals2 in permutations(values2):
            for xi, yi in zip(values1, vals2):
                if not all(map(lambda x, y, z: z(x, y), xi, yi, op)):
                    # This is not an isomorphism, go to next permutation.
                    break
            else:
                # Then we found an isomorphism.
                return True
        # Then there are no isomorphisms between the multiedges.
        return False

    return match
347
+
348
+
349
# Docstrings for generic functions (generic_multiedge_match defines its own).
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py ADDED
@@ -0,0 +1,304 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *****************************
3
+ Time-respecting VF2 Algorithm
4
+ *****************************
5
+
6
+ An extension of the VF2 algorithm for time-respecting graph isomorphism
7
+ testing in temporal graphs.
8
+
9
+ A temporal graph is one in which edges contain a datetime attribute,
10
+ denoting when interaction occurred between the incident nodes. A
11
+ time-respecting subgraph of a temporal graph is a subgraph such that
12
+ all interactions incident to a node occurred within a time threshold,
13
+ delta, of each other. A directed time-respecting subgraph has the
14
+ added constraint that incoming interactions to a node must precede
15
+ outgoing interactions from the same node - this enforces a sense of
16
+ directed flow.
17
+
18
+ Introduction
19
+ ------------
20
+
21
+ The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
22
+ extend the GraphMatcher and DiGraphMatcher classes, respectively,
23
+ to include temporal constraints on matches. This is achieved through
24
+ a semantic check, via the semantic_feasibility() function.
25
+
26
+ As well as including G1 (the graph in which to seek embeddings) and
27
+ G2 (the subgraph structure of interest), the name of the temporal
28
+ attribute on the edges and the time threshold, delta, must be supplied
29
+ as arguments to the matching constructors.
30
+
31
+ A delta of zero is the strictest temporal constraint on the match -
32
+ only embeddings in which all interactions occur at the same time will
33
+ be returned. A delta of one day will allow embeddings in which
34
+ adjacent interactions occur up to a day apart.
35
+
36
+ Examples
37
+ --------
38
+
39
+ Examples will be provided when the datetime type has been incorporated.
40
+
41
+
42
+ Temporal Subgraph Isomorphism
43
+ -----------------------------
44
+
45
+ A brief discussion of the somewhat diverse current literature will be
46
+ included here.
47
+
48
+ References
49
+ ----------
50
+
51
+ [1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
52
+ The 2013 IEEE/ACM International Conference on Advances in Social
53
+ Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
54
+ pages 1451 - 1452. [65]
55
+
56
+ For a discussion of the literature on temporal networks:
57
+
58
+ [3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
59
+ 519(3):97–125, 2012.
60
+
61
+ Notes
62
+ -----
63
+
64
+ Handles directed and undirected graphs and graphs with parallel edges.
65
+
66
+ """
67
+
68
+ import networkx as nx
69
+
70
+ from .isomorphvf2 import DiGraphMatcher, GraphMatcher
71
+
72
+ __all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
73
+
74
+
75
class TimeRespectingGraphMatcher(GraphMatcher):
    """VF2 matcher that additionally enforces temporal constraints.

    A candidate node pairing is semantically feasible only if every edge
    incident to the mapped node occurred within ``delta`` of the others.
    """

    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingGraphMatcher.

        G1 and G2 should be nx.Graph or nx.MultiGraph instances.

        Parameters
        ----------
        G1, G2 : nx.Graph or nx.MultiGraph
            The graph in which to seek embeddings and the (sub)graph
            structure of interest, respectively.
        temporal_attribute_name : str
            Name of the edge attribute holding the date/datetime value.
        delta : timedelta
            Maximum allowed spread between interactions incident to a node.

        Examples
        --------
        To create a TimeRespectingGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, "date", timedelta(days=1))
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def one_hop(self, Gx, Gx_node, neighbors):
        """
        Edges one hop out from a node in the mapping should be
        time-respecting with respect to each other.

        Raises ValueError if any incident edge lacks the temporal attribute
        value (i.e. it is None).
        """
        dates = []
        # MultiGraph subclasses Graph, so ``isinstance(Gx, nx.Graph)`` is
        # True for multigraphs too and cannot distinguish the adjacency
        # shapes; use is_multigraph() instead (hoisted out of the loop).
        multigraph = Gx.is_multigraph()
        for n in neighbors:
            if multigraph:
                # MultiGraph G[u][v] maps edge keys to data dictionaries;
                # collect the date of every parallel edge.
                for edge in Gx[Gx_node][n].values():
                    dates.append(edge[self.temporal_attribute_name])
            else:
                # Graph G[u][v] returns the data dictionary directly.
                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        if any(x is None for x in dates):
            raise ValueError("Datetime not supplied for at least one edge.")
        # Vacuously time-respecting when there are no incident mapped edges.
        return not dates or max(dates) - min(dates) <= self.delta

    def two_hop(self, Gx, core_x, Gx_node, neighbors):
        """
        Paths of length 2 from Gx_node should be time-respecting.
        """
        # Re-check each mapped neighbor's ego network with Gx_node included.
        return all(
            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
            for v in neighbors
        )

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
            return False
        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
            return False
        # Otherwise, this node is semantically feasible!
        return True
140
+
141
+
142
class TimeRespectingDiGraphMatcher(DiGraphMatcher):
    """VF2 digraph matcher with time-respecting semantic checks.

    In addition to the undirected constraint (all mapped interactions
    incident to a node within ``delta`` of each other), every incoming
    interaction at a node must not follow an outgoing one, enforcing a
    sense of directed flow.
    """

    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingDiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Parameters
        ----------
        G1, G2 : nx.DiGraph or nx.MultiDiGraph
            The graph in which to seek embeddings and the (sub)graph
            structure of interest, respectively.
        temporal_attribute_name : str
            Name of the edge attribute holding the date/datetime value.
        delta : timedelta
            Maximum allowed spread between interactions incident to a node.

        Examples
        --------
        To create a TimeRespectingDiGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, "date", timedelta(days=1))
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
        """
        Get the dates of edges from predecessors.
        """
        pred_dates = []
        # MultiDiGraph subclasses DiGraph, so ``isinstance(Gx, nx.DiGraph)``
        # cannot distinguish the two adjacency shapes; use is_multigraph().
        if Gx.is_multigraph():
            # MultiDiGraph G[u][v] maps edge keys to data dictionaries;
            # collect the date of every parallel edge.
            for n in pred:
                for edge in Gx[n][Gx_node].values():
                    pred_dates.append(edge[self.temporal_attribute_name])
        else:
            # DiGraph G[u][v] returns the data dictionary directly.
            for n in pred:
                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
        return pred_dates

    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
        """
        Get the dates of edges to successors.
        """
        succ_dates = []
        # See get_pred_dates: is_multigraph() distinguishes the shapes.
        if Gx.is_multigraph():
            # MultiDiGraph G[u][v] maps edge keys to data dictionaries.
            for n in succ:
                for edge in Gx[Gx_node][n].values():
                    succ_dates.append(edge[self.temporal_attribute_name])
        else:
            # DiGraph G[u][v] returns the data dictionary directly.
            for n in succ:
                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        return succ_dates

    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
        """
        The ego node: its mapped in/out edges must be time-respecting
        (test_one) and flow-ordered (test_two).
        """
        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
        return self.test_one(pred_dates, succ_dates) and self.test_two(
            pred_dates, succ_dates
        )

    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
        """
        The predecessors of the ego node: re-check each with Gx_node
        included among its successors.
        """
        return all(
            self.one_hop(
                Gx,
                p,
                core_x,
                self.preds(Gx, core_x, p),
                self.succs(Gx, core_x, p, Gx_node),
            )
            for p in pred
        )

    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
        """
        The successors of the ego node: re-check each with Gx_node
        included among its predecessors.
        """
        return all(
            self.one_hop(
                Gx,
                s,
                core_x,
                self.preds(Gx, core_x, s, Gx_node),
                self.succs(Gx, core_x, s),
            )
            for s in succ
        )

    def preds(self, Gx, core_x, v, Gx_node=None):
        """Mapped predecessors of v, plus Gx_node when supplied."""
        pred = [n for n in Gx.predecessors(v) if n in core_x]
        # Compare with None explicitly: node 0 is a valid but falsy label.
        if Gx_node is not None:
            pred.append(Gx_node)
        return pred

    def succs(self, Gx, core_x, v, Gx_node=None):
        """Mapped successors of v, plus Gx_node when supplied."""
        succ = [n for n in Gx.successors(v) if n in core_x]
        # Compare with None explicitly: node 0 is a valid but falsy label.
        if Gx_node is not None:
            succ.append(Gx_node)
        return succ

    def test_one(self, pred_dates, succ_dates):
        """
        Edges one hop out from Gx_node in the mapping should be
        time-respecting with respect to each other, regardless of
        direction.

        Raises ValueError if any edge lacks a date value (None).
        """
        time_respecting = True
        dates = pred_dates + succ_dates

        if any(x is None for x in dates):
            raise ValueError("Date or datetime not supplied for at least one edge.")

        dates.sort()  # Small to large.
        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
            time_respecting = False
        return time_respecting

    def test_two(self, pred_dates, succ_dates):
        """
        Edges from a dual Gx_node in the mapping should be ordered in
        a time-respecting manner.
        """
        time_respecting = True
        pred_dates.sort()
        succ_dates.sort()
        # First out before last in; negative of the necessary condition for time-respect.
        if (
            0 < len(succ_dates)
            and 0 < len(pred_dates)
            and succ_dates[0] < pred_dates[-1]
        ):
            time_respecting = False
        return time_respecting

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        pred, succ = (
            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
            [n for n in self.G1.successors(G1_node) if n in self.core_1],
        )
        if not self.one_hop(
            self.G1, G1_node, self.core_1, pred, succ
        ):  # Fail fast on first node.
            return False
        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
            return False
        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
            return False
        # Otherwise, this node is semantically feasible!
        return True
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (205 Bytes). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc ADDED
Binary file (9.19 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc ADDED
Binary file (2.27 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc ADDED
Binary file (8.86 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc ADDED
Binary file (2.27 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (7.13 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc ADDED
Binary file (4.64 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc ADDED
Binary file (31.8 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc ADDED
Binary file (50.3 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc ADDED
Binary file (7.16 kB). View file
 
venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 ADDED
Binary file (1.44 kB). View file