applied-ai-018 committed on
Commit 96963b4 · verified · 1 Parent(s): 5d6ed72

Add files using upload-large-folder tool
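For readers who want to reproduce this kind of push, a minimal sketch using the huggingface_hub client is shown below. It assumes a recent huggingface_hub release that exposes upload_large_folder; the repository id and repo type are placeholders, not details taken from this commit.

# Sketch only: mirroring a large local folder to the Hub with the
# upload-large-folder workflow (assumes huggingface_hub provides
# upload_large_folder; repo_id and repo_type below are hypothetical).
from huggingface_hub import upload_large_folder

upload_large_folder(
    repo_id="applied-ai-018/some-repo",  # placeholder repository id
    folder_path="env-llmeval",           # local folder to upload
    repo_type="model",                   # adjust to the actual repository type
)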

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py +24 -0
  2. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc +0 -0
  3. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py +258 -0
  14. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/clustering_coefficient.py +71 -0
  15. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py +412 -0
  16. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py +150 -0
  17. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py +148 -0
  18. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py +369 -0
  19. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py +43 -0
  20. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py +143 -0
  21. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py +52 -0
  22. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/steinertree.py +220 -0
  23. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  24. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc +0 -0
  25. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py +1498 -0
  29. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py +252 -0
  30. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py +82 -0
  31. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc +0 -0
  34. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc +0 -0
  36. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc +0 -0
  37. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc +0 -0
  38. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py +0 -0
  39. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  40. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-310.pyc +0 -0
  41. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
  42. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-310.pyc +0 -0
  43. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc +0 -0
  44. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc +0 -0
  45. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc +0 -0
  46. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc +0 -0
  47. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py ADDED
@@ -0,0 +1,24 @@
"""Approximations of graph properties and heuristic methods for optimization.

The functions in this module are not imported into the top-level ``networkx``
namespace so the easiest way to use them is with::

    >>> from networkx.algorithms import approximation

Another option is to import the specific function with
``from networkx.algorithms.approximation import function_name``.

"""
from networkx.algorithms.approximation.clustering_coefficient import *
from networkx.algorithms.approximation.clique import *
from networkx.algorithms.approximation.connectivity import *
from networkx.algorithms.approximation.distance_measures import *
from networkx.algorithms.approximation.dominating_set import *
from networkx.algorithms.approximation.kcomponents import *
from networkx.algorithms.approximation.matching import *
from networkx.algorithms.approximation.ramsey import *
from networkx.algorithms.approximation.steinertree import *
from networkx.algorithms.approximation.traveling_salesman import *
from networkx.algorithms.approximation.treewidth import *
from networkx.algorithms.approximation.vertex_cover import *
from networkx.algorithms.approximation.maxcut import *
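As the module docstring above says, these helpers are not re-exported at the top level of networkx. A minimal sketch of the two documented import patterns, using only functions that appear later in this diff (expected values are taken from their doctests):

# Import the approximation subpackage, or a specific function from it.
import networkx as nx
from networkx.algorithms import approximation
from networkx.algorithms.approximation import max_clique

G = nx.path_graph(10)
print(approximation.maximum_independent_set(G))  # {0, 2, 4, 6, 9} per the doctest
print(max_clique(G))                             # a 2-node clique such as {8, 9}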
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.35 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-310.pyc ADDED
Binary file (7.96 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-310.pyc ADDED
Binary file (2.53 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc ADDED
Binary file (11.2 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-310.pyc ADDED
Binary file (5.03 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc ADDED
Binary file (4.46 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc ADDED
Binary file (13.9 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc ADDED
Binary file (1.44 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-310.pyc ADDED
Binary file (4.32 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc ADDED
Binary file (1.57 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-310.pyc ADDED
Binary file (7.23 kB).
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py ADDED
@@ -0,0 +1,258 @@
"""Functions for computing large cliques and maximum independent sets."""
import networkx as nx
from networkx.algorithms.approximation import ramsey
from networkx.utils import not_implemented_for

__all__ = [
    "clique_removal",
    "max_clique",
    "large_clique_size",
    "maximum_independent_set",
]


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def maximum_independent_set(G):
    """Returns an approximate maximum independent set.

    Independent set or stable set is a set of vertices in a graph, no two of
    which are adjacent. That is, it is a set I of vertices such that for every
    two vertices in I, there is no edge connecting the two. Equivalently, each
    edge in the graph has at most one endpoint in I. The size of an independent
    set is the number of vertices it contains [1]_.

    A maximum independent set is a largest independent set for a given graph G
    and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
    the maximum independent set problem and is an NP-hard optimization problem.
    As such, it is unlikely that there exists an efficient algorithm for finding
    a maximum independent set of a graph.

    The Independent Set algorithm is based on [2]_.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    iset : Set
        The apx-maximum independent set

    Examples
    --------
    >>> G = nx.path_graph(10)
    >>> nx.approximation.maximum_independent_set(G)
    {0, 2, 4, 6, 9}

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    Notes
    -----
    Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.

    References
    ----------
    .. [1] `Wikipedia: Independent set
        <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
    .. [2] Boppana, R., & Halldórsson, M. M. (1992).
        Approximating maximum independent sets by excluding subgraphs.
        BIT Numerical Mathematics, 32(2), 180–196. Springer.
    """
    iset, _ = clique_removal(G)
    return iset


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def max_clique(G):
    r"""Find the Maximum Clique

    Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
    in the worst case.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    clique : set
        The apx-maximum clique of the graph

    Examples
    --------
    >>> G = nx.path_graph(10)
    >>> nx.approximation.max_clique(G)
    {8, 9}

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    Notes
    -----
    A clique in an undirected graph G = (V, E) is a subset of the vertex set
    `C \subseteq V` such that for every two vertices in C there exists an edge
    connecting the two. This is equivalent to saying that the subgraph
    induced by C is complete (in some cases, the term clique may also refer
    to the subgraph).

    A maximum clique is a clique of the largest possible size in a given graph.
    The clique number `\omega(G)` of a graph G is the number of
    vertices in a maximum clique in G. The intersection number of
    G is the smallest number of cliques that together cover all edges of G.

    https://en.wikipedia.org/wiki/Maximum_clique

    References
    ----------
    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
        Approximating maximum independent sets by excluding subgraphs.
        BIT Numerical Mathematics, 32(2), 180–196. Springer.
        doi:10.1007/BF01994876
    """
    # finding the maximum clique in a graph is equivalent to finding
    # the independent set in the complementary graph
    cgraph = nx.complement(G)
    iset, _ = clique_removal(cgraph)
    return iset


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def clique_removal(G):
    r"""Repeatedly remove cliques from the graph.

    Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
    and independent set. Returns the largest independent set found, along
    with found maximal cliques.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    max_ind_cliques : (set, list) tuple
        2-tuple of Maximal Independent Set and list of maximal cliques (sets).

    Examples
    --------
    >>> G = nx.path_graph(10)
    >>> nx.approximation.clique_removal(G)
    ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
        Approximating maximum independent sets by excluding subgraphs.
        BIT Numerical Mathematics, 32(2), 180–196. Springer.
    """
    graph = G.copy()
    c_i, i_i = ramsey.ramsey_R2(graph)
    cliques = [c_i]
    isets = [i_i]
    while graph:
        graph.remove_nodes_from(c_i)
        c_i, i_i = ramsey.ramsey_R2(graph)
        if c_i:
            cliques.append(c_i)
        if i_i:
            isets.append(i_i)
    # Determine the largest independent set as measured by cardinality.
    maxiset = max(isets, key=len)
    return maxiset, cliques


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def large_clique_size(G):
    """Find the size of a large clique in a graph.

    A *clique* is a subset of nodes in which each pair of nodes is
    adjacent. This function is a heuristic for finding the size of a
    large clique in the graph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    k: integer
       The size of a large clique in the graph.

    Examples
    --------
    >>> G = nx.path_graph(10)
    >>> nx.approximation.large_clique_size(G)
    2

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    Notes
    -----
    This implementation is from [1]_. Its worst case time complexity is
    :math:`O(n d^2)`, where *n* is the number of nodes in the graph and
    *d* is the maximum degree.

    This function is a heuristic, which means it may work well in
    practice, but there is no rigorous mathematical guarantee on the
    ratio between the returned number and the actual largest clique size
    in the graph.

    References
    ----------
    .. [1] Pattabiraman, Bharath, et al.
       "Fast Algorithms for the Maximum Clique Problem on Massive Graphs
       with Applications to Overlapping Community Detection."
       *Internet Mathematics* 11.4-5 (2015): 421--448.
       <https://doi.org/10.1080/15427951.2014.986778>

    See also
    --------

    :func:`networkx.algorithms.approximation.clique.max_clique`
        A function that returns an approximate maximum clique with a
        guarantee on the approximation ratio.

    :mod:`networkx.algorithms.clique`
        Functions for finding the exact maximum clique in a graph.

    """
    degrees = G.degree

    def _clique_heuristic(G, U, size, best_size):
        if not U:
            return max(best_size, size)
        u = max(U, key=degrees)
        U.remove(u)
        N_prime = {v for v in G[u] if degrees[v] >= best_size}
        return _clique_heuristic(G, U & N_prime, size + 1, best_size)

    best_size = 0
    nodes = (u for u in G if degrees[u] >= best_size)
    for u in nodes:
        neighbors = {v for v in G[u] if degrees[v] >= best_size}
        best_size = _clique_heuristic(G, neighbors, 1, best_size)
    return best_size
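All four public helpers in clique.py are driven by ramsey.ramsey_R2 through clique_removal. A short sketch exercising them on the path graph used in the doctests above; expected outputs are the ones shown in those doctests:

# Approximate clique / independent-set helpers on a path graph.
import networkx as nx
from networkx.algorithms import approximation as approx

G = nx.path_graph(10)
iset, cliques = approx.clique_removal(G)  # largest independent set found + maximal cliques
print(iset)                               # {0, 2, 4, 6, 9}
print(cliques)                            # [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}]
print(approx.max_clique(G))               # {8, 9}
print(approx.large_clique_size(G))        # 2 (heuristic, no approximation guarantee)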
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/clustering_coefficient.py ADDED
@@ -0,0 +1,71 @@
import networkx as nx
from networkx.utils import not_implemented_for, py_random_state

__all__ = ["average_clustering"]


@not_implemented_for("directed")
@py_random_state(2)
@nx._dispatchable(name="approximate_average_clustering")
def average_clustering(G, trials=1000, seed=None):
    r"""Estimates the average clustering coefficient of G.

    The local clustering of each node in `G` is the fraction of triangles
    that actually exist over all possible triangles in its neighborhood.
    The average clustering coefficient of a graph `G` is the mean of
    local clusterings.

    This function finds an approximate average clustering coefficient
    for G by repeating `n` times (defined in `trials`) the following
    experiment: choose a node at random, choose two of its neighbors
    at random, and check if they are connected. The approximate
    coefficient is the fraction of triangles found over the number
    of trials [1]_.

    Parameters
    ----------
    G : NetworkX graph

    trials : integer
        Number of trials to perform (default 1000).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    c : float
        Approximated average clustering coefficient.

    Examples
    --------
    >>> from networkx.algorithms import approximation
    >>> G = nx.erdos_renyi_graph(10, 0.2, seed=10)
    >>> approximation.average_clustering(G, trials=1000, seed=10)
    0.214

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    References
    ----------
    .. [1] Schank, Thomas, and Dorothea Wagner. Approximating clustering
       coefficient and transitivity. Universität Karlsruhe, Fakultät für
       Informatik, 2004.
       https://doi.org/10.5445/IR/1000001239

    """
    n = len(G)
    triangles = 0
    nodes = list(G)
    for i in [int(seed.random() * n) for i in range(trials)]:
        nbrs = list(G[nodes[i]])
        if len(nbrs) < 2:
            continue
        u, v = seed.sample(nbrs, 2)
        if u in G[v]:
            triangles += 1
    return triangles / trials
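average_clustering above is a Monte Carlo estimate, so its value depends on trials and seed. A brief sketch comparing it with the exact coefficient on the graph from the doctest:

# Sampled estimate vs. exact average clustering coefficient.
import networkx as nx
from networkx.algorithms import approximation

G = nx.erdos_renyi_graph(10, 0.2, seed=10)
estimate = approximation.average_clustering(G, trials=1000, seed=10)  # 0.214 per the doctest
exact = nx.average_clustering(G)  # exact value for comparison
print(estimate, exact)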
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py ADDED
@@ -0,0 +1,412 @@
""" Fast approximation for node connectivity
"""
import itertools
from operator import itemgetter

import networkx as nx

__all__ = [
    "local_node_connectivity",
    "node_connectivity",
    "all_pairs_node_connectivity",
]


@nx._dispatchable(name="approximate_local_node_connectivity")
def local_node_connectivity(G, source, target, cutoff=None):
    """Compute node connectivity between source and target.

    Pairwise or local node connectivity between two distinct and nonadjacent
    nodes is the minimum number of nodes that must be removed (minimum
    separating cutset) to disconnect them. By Menger's theorem, this is equal
    to the number of node independent paths (paths that share no nodes other
    than source and target), which is what we compute in this function.

    This algorithm is a fast approximation that gives a strict lower
    bound on the actual number of node independent paths between two nodes [1]_.
    It works for both directed and undirected graphs.

    Parameters
    ----------

    G : NetworkX graph

    source : node
        Starting node for node connectivity

    target : node
        Ending node for node connectivity

    cutoff : integer
        Maximum node connectivity to consider. If None, the minimum degree
        of source or target is used as a cutoff. Default value None.

    Returns
    -------
    k: integer
       pairwise node connectivity

    Examples
    --------
    >>> # Platonic octahedral graph has node connectivity 4
    >>> # for each non adjacent node pair
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.octahedral_graph()
    >>> approx.local_node_connectivity(G, 0, 5)
    4

    Notes
    -----
    This algorithm [1]_ finds node independent paths between two nodes by
    computing their shortest path using BFS, marking the nodes of the path
    found as 'used' and then searching other shortest paths excluding the
    nodes marked as used until no more paths exist. It is not exact because
    a shortest path could use nodes that, if the path were longer, may belong
    to two different node independent paths. Thus it only guarantees a
    strict lower bound on node connectivity.

    Note that the authors propose a further refinement, losing accuracy and
    gaining speed, which is not implemented yet.

    See also
    --------
    all_pairs_node_connectivity
    node_connectivity

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf

    """
    if target == source:
        raise nx.NetworkXError("source and target have to be different nodes.")

    # Maximum possible node independent paths
    if G.is_directed():
        possible = min(G.out_degree(source), G.in_degree(target))
    else:
        possible = min(G.degree(source), G.degree(target))

    K = 0
    if not possible:
        return K

    if cutoff is None:
        cutoff = float("inf")

    exclude = set()
    for i in range(min(possible, cutoff)):
        try:
            path = _bidirectional_shortest_path(G, source, target, exclude)
            exclude.update(set(path))
            K += 1
        except nx.NetworkXNoPath:
            break

    return K


@nx._dispatchable(name="approximate_node_connectivity")
def node_connectivity(G, s=None, t=None):
    r"""Returns an approximation for node connectivity for a graph or digraph G.

    Node connectivity is equal to the minimum number of nodes that
    must be removed to disconnect G or render it trivial. By Menger's theorem,
    this is equal to the number of node independent paths (paths that
    share no nodes other than source and target).

    If source and target nodes are provided, this function returns the
    local node connectivity: the minimum number of nodes that must be
    removed to break all paths from source to target in G.

    This algorithm is based on a fast approximation that gives a strict lower
    bound on the actual number of node independent paths between two nodes [1]_.
    It works for both directed and undirected graphs.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    Returns
    -------
    K : integer
        Node connectivity of G, or local node connectivity if source
        and target are provided.

    Examples
    --------
    >>> # Platonic octahedral graph is 4-node-connected
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.octahedral_graph()
    >>> approx.node_connectivity(G)
    4

    Notes
    -----
    This algorithm [1]_ finds node independent paths between two nodes by
    computing their shortest path using BFS, marking the nodes of the path
    found as 'used' and then searching other shortest paths excluding the
    nodes marked as used until no more paths exist. It is not exact because
    a shortest path could use nodes that, if the path were longer, may belong
    to two different node independent paths. Thus it only guarantees a
    strict lower bound on node connectivity.

    See also
    --------
    all_pairs_node_connectivity
    local_node_connectivity

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf

    """
    if (s is not None and t is None) or (s is None and t is not None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local node connectivity
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return local_node_connectivity(G, s, t)

    # Global node connectivity
    if G.is_directed():
        connected_func = nx.is_weakly_connected
        iter_func = itertools.permutations

        def neighbors(v):
            return itertools.chain(G.predecessors(v), G.successors(v))

    else:
        connected_func = nx.is_connected
        iter_func = itertools.combinations
        neighbors = G.neighbors

    if not connected_func(G):
        return 0

    # Choose a node with minimum degree
    v, minimum_degree = min(G.degree(), key=itemgetter(1))
    # Node connectivity is bounded by minimum degree
    K = minimum_degree
    # compute local node connectivity with all non-neighbors nodes
    # and store the minimum
    for w in set(G) - set(neighbors(v)) - {v}:
        K = min(K, local_node_connectivity(G, v, w, cutoff=K))
    # Same for non adjacent pairs of neighbors of v
    for x, y in iter_func(neighbors(v), 2):
        if y not in G[x] and x != y:
            K = min(K, local_node_connectivity(G, x, y, cutoff=K))
    return K


@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
    """Compute node connectivity between all pairs of nodes.

    Pairwise or local node connectivity between two distinct and nonadjacent
    nodes is the minimum number of nodes that must be removed (minimum
    separating cutset) to disconnect them. By Menger's theorem, this is equal
    to the number of node independent paths (paths that share no nodes other
    than source and target), which is what we compute in this function.

    This algorithm is a fast approximation that gives a strict lower
    bound on the actual number of node independent paths between two nodes [1]_.
    It works for both directed and undirected graphs.


    Parameters
    ----------
    G : NetworkX graph

    nbunch: container
        Container of nodes. If provided node connectivity will be computed
        only over pairs of nodes in nbunch.

    cutoff : integer
        Maximum node connectivity to consider. If None, the minimum degree
        of source or target is used as a cutoff in each pair of nodes.
        Default value None.

    Returns
    -------
    K : dictionary
        Dictionary, keyed by source and target, of pairwise node connectivity

    Examples
    --------
    A 3 node cycle with one extra node attached has connectivity 2 between all
    nodes in the cycle and connectivity 1 between the extra node and the rest:

    >>> G = nx.cycle_graph(3)
    >>> G.add_edge(2, 3)
    >>> import pprint  # for nice dictionary formatting
    >>> pprint.pprint(nx.all_pairs_node_connectivity(G))
    {0: {1: 2, 2: 2, 3: 1},
     1: {0: 2, 2: 2, 3: 1},
     2: {0: 2, 1: 2, 3: 1},
     3: {0: 1, 1: 1, 2: 1}}

    See Also
    --------
    local_node_connectivity
    node_connectivity

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    if nbunch is None:
        nbunch = G
    else:
        nbunch = set(nbunch)

    directed = G.is_directed()
    if directed:
        iter_func = itertools.permutations
    else:
        iter_func = itertools.combinations

    all_pairs = {n: {} for n in nbunch}

    for u, v in iter_func(nbunch, 2):
        k = local_node_connectivity(G, u, v, cutoff=cutoff)
        all_pairs[u][v] = k
        if not directed:
            all_pairs[v][u] = k

    return all_pairs


def _bidirectional_shortest_path(G, source, target, exclude):
    """Returns shortest path between source and target ignoring nodes in the
    container 'exclude'.

    Parameters
    ----------

    G : NetworkX graph

    source : node
        Starting node for path

    target : node
        Ending node for path

    exclude: container
        Container for nodes to exclude from the search for shortest paths

    Returns
    -------
    path: list
        Shortest path between source and target ignoring nodes in 'exclude'

    Raises
    ------
    NetworkXNoPath
        If there is no path or if nodes are adjacent and have only one path
        between them

    Notes
    -----
    This function and its helper are originally from
    networkx.algorithms.shortest_paths.unweighted and are modified to
    accept the extra parameter 'exclude', which is a container for nodes
    already used in other paths that should be ignored.

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf

    """
    # call helper to do the real work
    results = _bidirectional_pred_succ(G, source, target, exclude)
    pred, succ, w = results

    # build path from pred+w+succ
    path = []
    # from source to w
    while w is not None:
        path.append(w)
        w = pred[w]
    path.reverse()
    # from w to target
    w = succ[path[-1]]
    while w is not None:
        path.append(w)
        w = succ[w]

    return path


def _bidirectional_pred_succ(G, source, target, exclude):
    # does BFS from both source and target and meets in the middle
    # excludes nodes in the container "exclude" from the search

    # handle either directed or undirected
    if G.is_directed():
        Gpred = G.predecessors
        Gsucc = G.successors
    else:
        Gpred = G.neighbors
        Gsucc = G.neighbors

    # predecessor and successors in search
    pred = {source: None}
    succ = {target: None}

    # initialize fringes, start with forward
    forward_fringe = [source]
    reverse_fringe = [target]

    level = 0

    while forward_fringe and reverse_fringe:
        # Make sure that we iterate one step forward and one step backwards
        # thus source and target will only trigger "found path" when they are
        # adjacent and then they can be safely included in the container 'exclude'
        level += 1
        if level % 2 != 0:
            this_level = forward_fringe
            forward_fringe = []
            for v in this_level:
                for w in Gsucc(v):
                    if w in exclude:
                        continue
                    if w not in pred:
                        forward_fringe.append(w)
                        pred[w] = v
                    if w in succ:
                        return pred, succ, w  # found path
        else:
            this_level = reverse_fringe
            reverse_fringe = []
            for v in this_level:
                for w in Gpred(v):
                    if w in exclude:
                        continue
                    if w not in succ:
                        succ[w] = v
                        reverse_fringe.append(w)
                    if w in pred:
                        return pred, succ, w  # found path

    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
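The three public functions in connectivity.py all rely on the same BFS-based lower-bound routine. A small sketch on the octahedral graph from the docstrings, which is 4-node-connected:

# Approximate (lower-bound) node connectivity on the octahedral graph.
import networkx as nx
from networkx.algorithms import approximation as approx

G = nx.octahedral_graph()
print(approx.local_node_connectivity(G, 0, 5))  # 4, as in the doctest
print(approx.node_connectivity(G))              # 4
pairs = approx.all_pairs_node_connectivity(G, nbunch=[0, 1, 5])
print(pairs[0][5])                              # lower bound for the pair (0, 5)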
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py ADDED
@@ -0,0 +1,150 @@
"""Distance measures approximated metrics."""

import networkx as nx
from networkx.utils.decorators import py_random_state

__all__ = ["diameter"]


@py_random_state(1)
@nx._dispatchable(name="approximate_diameter")
def diameter(G, seed=None):
    """Returns a lower bound on the diameter of the graph G.

    The function computes a lower bound on the diameter (i.e., the maximum eccentricity)
    of a directed or undirected graph G. The procedure used varies depending on the graph
    being directed or not.

    If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_.
    The main idea is to pick the farthest node from a random node and return its eccentricity.

    Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_.
    The procedure starts by selecting a random source node $s$ from which it performs a
    forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and
    backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using
    a backward BFS and the forward eccentricity of $a_2$ using a forward BFS.
    Finally, it returns the best lower bound between the two.

    In both cases, the time complexity is linear with respect to the size of G.

    Parameters
    ----------
    G : NetworkX graph

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    d : integer
       Lower Bound on the Diameter of G

    Examples
    --------
    >>> G = nx.path_graph(10)  # undirected graph
    >>> nx.diameter(G)
    9
    >>> G = nx.cycle_graph(3, create_using=nx.DiGraph)  # directed graph
    >>> nx.diameter(G)
    2

    Raises
    ------
    NetworkXError
        If the graph is empty or
        If the graph is undirected and not connected or
        If the graph is directed and not strongly connected.

    See Also
    --------
    networkx.algorithms.distance_measures.diameter

    References
    ----------
    .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
       *Fast computation of empirically tight bounds for the diameter of massive graphs.*
       Journal of Experimental Algorithmics (JEA), 2009.
       https://arxiv.org/pdf/0904.2728.pdf
    .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino.
       *On computing the diameter of real-world directed (weighted) graphs.*
       International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012.
       https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
    """
    # if G is empty
    if not G:
        raise nx.NetworkXError("Expected non-empty NetworkX graph!")
    # if there's only a node
    if G.number_of_nodes() == 1:
        return 0
    # if G is directed
    if G.is_directed():
        return _two_sweep_directed(G, seed)
    # else if G is undirected
    return _two_sweep_undirected(G, seed)


def _two_sweep_undirected(G, seed):
    """Helper function for finding a lower bound on the diameter
    for undirected Graphs.

    The idea is to pick the farthest node from a random node
    and return its eccentricity.

    ``G`` is a NetworkX undirected graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # select a random source node
    source = seed.choice(list(G))
    # get the distances to the other nodes
    distances = nx.shortest_path_length(G, source)
    # if some nodes have not been visited, then the graph is not connected
    if len(distances) != len(G):
        raise nx.NetworkXError("Graph not connected.")
    # take a node that is (one of) the farthest nodes from the source
    *_, node = distances
    # return the eccentricity of the node
    return nx.eccentricity(G, node)


def _two_sweep_directed(G, seed):
    """Helper function for finding a lower bound on the diameter
    for directed Graphs.

    It implements 2-dSweep, the directed version of the 2-sweep algorithm.
    The algorithm follows the following steps.
    1. Select a source node $s$ at random.
    2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum
    distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$.
    3. Perform a backward BFS from $s$ to select a node $a_2$ at the maximum
    distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$.
    4. Return the maximum between $LB_1$ and $LB_2$.

    ``G`` is a NetworkX directed graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # get a new digraph G' with the edges reversed in the opposite direction
    G_reversed = G.reverse()
    # select a random source node
    source = seed.choice(list(G))
    # compute forward distances from source
    forward_distances = nx.shortest_path_length(G, source)
    # compute backward distances from source
    backward_distances = nx.shortest_path_length(G_reversed, source)
    # if either the source can't reach every node or not every node
    # can reach the source, then the graph is not strongly connected
    n = len(G)
    if len(forward_distances) != n or len(backward_distances) != n:
        raise nx.NetworkXError("DiGraph not strongly connected.")
    # take a node a_1 at the maximum distance from the source in G
    *_, a_1 = forward_distances
    # take a node a_2 at the maximum distance from the source in G_reversed
    *_, a_2 = backward_distances
    # return the max between the backward eccentricity of a_1 and the forward eccentricity of a_2
    return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2))
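Because the 2-sweep procedure only returns a lower bound on the diameter, it is cheap to sanity-check against the exact value on small graphs. A brief sketch (on a path graph the bound happens to match the true diameter):

# Lower-bound diameter from the approximation module next to the exact diameter.
import networkx as nx
from networkx.algorithms import approximation

G = nx.path_graph(10)
print(approximation.diameter(G, seed=1))  # lower bound; 9 on this graph
print(nx.diameter(G))                     # 9, exact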
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py ADDED
@@ -0,0 +1,148 @@
"""Functions for finding node and edge dominating sets.

A `dominating set`_ for an undirected graph *G* with vertex set *V*
and edge set *E* is a subset *D* of *V* such that every vertex not in
*D* is adjacent to at least one member of *D*. An `edge dominating set`_
is a subset *F* of *E* such that every edge not in *F* is
incident to an endpoint of at least one edge in *F*.

.. _dominating set: https://en.wikipedia.org/wiki/Dominating_set
.. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set

"""
import networkx as nx

from ...utils import not_implemented_for
from ..matching import maximal_matching

__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]


# TODO Why doesn't this algorithm work for directed graphs?
@not_implemented_for("directed")
@nx._dispatchable(node_attrs="weight")
def min_weighted_dominating_set(G, weight=None):
    r"""Returns a dominating set that approximates the minimum weight node
    dominating set.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    weight : string
        The node attribute storing the weight of a node. If provided,
        the node attribute with this key must be a number for each
        node. If not provided, each node is assumed to have weight one.

    Returns
    -------
    min_weight_dominating_set : set
        A set of nodes, the sum of whose weights is no more than `(\log
        w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of
        each node in the graph and `w(V^*)` denotes the sum of the
        weights of each node in the minimum weight dominating set.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
    >>> nx.approximation.min_weighted_dominating_set(G)
    {1, 2, 4}

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Notes
    -----
    This algorithm computes an approximate minimum weighted dominating
    set for the graph `G`. The returned solution has weight `(\log
    w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each
    node in the graph and `w(V^*)` denotes the sum of the weights of
    each node in the minimum weight dominating set for the graph.

    This implementation of the algorithm runs in $O(m)$ time, where $m$
    is the number of edges in the graph.

    References
    ----------
    .. [1] Vazirani, Vijay V.
           *Approximation Algorithms*.
           Springer Science & Business Media, 2001.

    """
    # The unique dominating set for the null graph is the empty set.
    if len(G) == 0:
        return set()

    # This is the dominating set that will eventually be returned.
    dom_set = set()

    def _cost(node_and_neighborhood):
        """Returns the cost-effectiveness of greedily choosing the given
        node.

        `node_and_neighborhood` is a two-tuple comprising a node and its
        closed neighborhood.

        """
        v, neighborhood = node_and_neighborhood
        return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set)

    # This is a set of all vertices not already covered by the
    # dominating set.
    vertices = set(G)
    # This is a dictionary mapping each node to the closed neighborhood
    # of that node.
    neighborhoods = {v: {v} | set(G[v]) for v in G}

    # Continue until all vertices are adjacent to some node in the
    # dominating set.
    while vertices:
        # Find the most cost-effective node to add, along with its
        # closed neighborhood.
        dom_node, min_set = min(neighborhoods.items(), key=_cost)
        # Add the node to the dominating set and reduce the remaining
        # set of nodes to cover.
        dom_set.add(dom_node)
        del neighborhoods[dom_node]
        vertices -= min_set

    return dom_set


@nx._dispatchable
def min_edge_dominating_set(G):
    r"""Returns minimum cardinality edge dominating set.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    min_edge_dominating_set : set
        Returns a set of dominating edges whose size is no more than 2 * OPT.

    Examples
    --------
    >>> G = nx.petersen_graph()
    >>> nx.approximation.min_edge_dominating_set(G)
    {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)}

    Raises
    ------
    ValueError
        If the input graph `G` is empty.

    Notes
    -----
    The algorithm computes an approximate solution to the edge dominating set
    problem. The result is no more than 2 * OPT in terms of size of the set.
    Runtime of the algorithm is $O(|E|)$.
    """
    if not G:
        raise ValueError("Expected non-empty NetworkX graph!")
    return maximal_matching(G)
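min_edge_dominating_set simply returns a maximal matching, while min_weighted_dominating_set runs the greedy loop shown above. A short sketch with the graphs from the doctests:

# Approximate node and edge dominating sets.
import networkx as nx
from networkx.algorithms import approximation as approx

G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
print(approx.min_weighted_dominating_set(G))  # {1, 2, 4} per the doctest
H = nx.petersen_graph()
print(approx.min_edge_dominating_set(H))      # a maximal matching, size <= 2 * OPT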
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py ADDED
@@ -0,0 +1,369 @@
""" Fast approximation for k-component structure
"""
import itertools
from collections import defaultdict
from collections.abc import Mapping
from functools import cached_property

import networkx as nx
from networkx.algorithms.approximation import local_node_connectivity
from networkx.exception import NetworkXError
from networkx.utils import not_implemented_for

__all__ = ["k_components"]


@not_implemented_for("directed")
@nx._dispatchable(name="approximate_k_components")
def k_components(G, min_density=0.95):
    r"""Returns the approximate k-component structure of a graph G.

    A `k`-component is a maximal subgraph of a graph G that has, at least,
    node connectivity `k`: we need to remove at least `k` nodes to break it
    into more components. `k`-components have an inherent hierarchical
    structure because they are nested in terms of connectivity: a connected
    graph can contain several 2-components, each of which can contain
    one or more 3-components, and so forth.

    This implementation is based on the fast heuristics to approximate
    the `k`-component structure of a graph [1]_, which, in turn, is based on
    a fast approximation algorithm for finding good lower bounds of the number
    of node independent paths between two nodes [2]_.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    min_density : Float
        Density relaxation threshold. Default value 0.95

    Returns
    -------
    k_components : dict
        Dictionary with connectivity level `k` as key and a list of
        sets of nodes that form a k-component of level `k` as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> # Petersen graph has 10 nodes and it is triconnected, thus all
    >>> # nodes are in a single component on all three connectivity levels
    >>> from networkx.algorithms import approximation as apxa
    >>> G = nx.petersen_graph()
    >>> k_components = apxa.k_components(G)

    Notes
    -----
    The logic of the approximation algorithm for computing the `k`-component
    structure [1]_ is based on repeatedly applying simple and fast algorithms
    for `k`-cores and biconnected components in order to narrow down the
    number of pairs of nodes over which we have to compute White and Newman's
    approximation algorithm for finding node independent paths [2]_. More
    formally, this algorithm is based on Whitney's theorem, which states
    an inclusion relation among node connectivity, edge connectivity, and
    minimum degree for any graph G. This theorem implies that every
    `k`-component is nested inside a `k`-edge-component, which in turn,
    is contained in a `k`-core. Thus, this algorithm computes node independent
    paths among pairs of nodes in each biconnected part of each `k`-core,
    and repeats this procedure for each `k` from 3 to the maximal core number
    of a node in the input graph.

    Because, in practice, many nodes of the core of level `k` inside a
    bicomponent actually are part of a component of level k, the auxiliary
    graph needed for the algorithm is likely to be very dense. Thus, we use
    a complement graph data structure (see `AntiGraph`) to save memory.
    AntiGraph only stores information of the edges that are *not* present
    in the actual auxiliary graph. When applying algorithms to this
    complement graph data structure, it behaves as if it were the dense
    version.

    See also
    --------
    k_components

    References
    ----------
    .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
        Visualization and Heuristics for Fast Computation.
        https://arxiv.org/pdf/1503.04476v1

    .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind

    .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
        A hierarchical conception of social groups.
        American Sociological Review 68(1), 103--28.
        https://doi.org/10.2307/3088904

    """
    # Dictionary with connectivity level (k) as keys and a list of
    # sets of nodes that form a k-component as values
    k_components = defaultdict(list)
    # make a few functions local for speed
    node_connectivity = local_node_connectivity
    k_core = nx.k_core
    core_number = nx.core_number
    biconnected_components = nx.biconnected_components
    combinations = itertools.combinations
    # Exact solution for k = {1,2}
    # There is a linear time algorithm for triconnectivity, if we had an
    # implementation available we could start from k = 4.
    for component in nx.connected_components(G):
        # isolated nodes have connectivity 0
        comp = set(component)
        if len(comp) > 1:
            k_components[1].append(comp)
    for bicomponent in nx.biconnected_components(G):
        # avoid considering dyads as bicomponents
        bicomp = set(bicomponent)
        if len(bicomp) > 2:
            k_components[2].append(bicomp)
    # There is no k-component of k > maximum core number
    # \kappa(G) <= \lambda(G) <= \delta(G)
    g_cnumber = core_number(G)
    max_core = max(g_cnumber.values())
    for k in range(3, max_core + 1):
        C = k_core(G, k, core_number=g_cnumber)
        for nodes in biconnected_components(C):
            # Build a subgraph SG induced by the nodes that are part of
            # each biconnected component of the k-core subgraph C.
            if len(nodes) < k:
                continue
            SG = G.subgraph(nodes)
            # Build auxiliary graph
            H = _AntiGraph()
            H.add_nodes_from(SG.nodes())
            for u, v in combinations(SG, 2):
                K = node_connectivity(SG, u, v, cutoff=k)
                if k > K:
                    H.add_edge(u, v)
            for h_nodes in biconnected_components(H):
                if len(h_nodes) <= k:
                    continue
                SH = H.subgraph(h_nodes)
                for Gc in _cliques_heuristic(SG, SH, k, min_density):
                    for k_nodes in biconnected_components(Gc):
                        Gk = nx.k_core(SG.subgraph(k_nodes), k)
                        if len(Gk) <= k:
                            continue
                        k_components[k].append(set(Gk))
    return k_components


def _cliques_heuristic(G, H, k, min_density):
    h_cnumber = nx.core_number(H)
    for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
        cands = {n for n, c in h_cnumber.items() if c == c_value}
        # Skip checking for overlap for the highest core value
        if i == 0:
            overlap = False
        else:
            overlap = set.intersection(
                *[{x for x in H[n] if x not in cands} for n in cands]
            )
        if overlap and len(overlap) < k:
            SH = H.subgraph(cands | overlap)
        else:
            SH = H.subgraph(cands)
        sh_cnumber = nx.core_number(SH)
        SG = nx.k_core(G.subgraph(SH), k)
        while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
            # This subgraph must be writable => .copy()
            SH = H.subgraph(SG).copy()
            if len(SH) <= k:
                break
            sh_cnumber = nx.core_number(SH)
            sh_deg = dict(SH.degree())
            min_deg = min(sh_deg.values())
            SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
            SG = nx.k_core(G.subgraph(SH), k)
        else:
            yield SG


def _same(measure, tol=0):
    vals = set(measure.values())
    if (max(vals) - min(vals)) <= tol:
        return True
    return False


class _AntiGraph(nx.Graph):
    """
    Class for complement graphs.

    The main goal is to be able to work with big and dense graphs with
    a low memory footprint.

    In this class you add the edges that *do not exist* in the dense graph;
    the report methods of the class return the neighbors, the edges and
    the degree as if it were the dense graph. Thus it's possible to use
    an instance of this class with some of NetworkX functions. In this
    case we only use k-core, connected_components, and biconnected_components.
    """

    all_edge_dict = {"weight": 1}

    def single_edge_dict(self):
        return self.all_edge_dict

    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]

    def __getitem__(self, n):
        """Returns a dict of neighbors of node n in the dense graph.

        Parameters
        ----------
        n : node
           A node in the graph.

        Returns
        -------
        adj_dict : dictionary
           The adjacency dictionary for nodes connected to n.

        """
        all_edge_dict = self.all_edge_dict
        return {
            node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
        }

    def neighbors(self, n):
        """Returns an iterator over all neighbors of node n in the
        dense graph.
        """
        try:
            return iter(set(self._adj) - set(self._adj[n]) - {n})
        except KeyError as err:
            raise NetworkXError(f"The node {n} is not in the graph.") from err

    class AntiAtlasView(Mapping):
        """An adjacency inner dict for AntiGraph"""

        def __init__(self, graph, node):
            self._graph = graph
            self._atlas = graph._adj[node]
            self._node = node

        def __len__(self):
            return len(self._graph) - len(self._atlas) - 1

        def __iter__(self):
            return (n for n in self._graph if n not in self._atlas and n != self._node)

        def __getitem__(self, nbr):
            nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
            if nbr in nbrs:
                return self._graph.all_edge_dict
            raise KeyError(nbr)

    class AntiAdjacencyView(AntiAtlasView):
        """An adjacency outer dict for AntiGraph"""

        def __init__(self, graph):
            self._graph = graph
            self._atlas = graph._adj

        def __len__(self):
            return len(self._atlas)

        def __iter__(self):
            return iter(self._graph)

        def __getitem__(self, node):
            if node not in self._graph:
                raise KeyError(node)
            return self._graph.AntiAtlasView(self._graph, node)

    @cached_property
    def adj(self):
        return self.AntiAdjacencyView(self)

    def subgraph(self, nodes):
        """This subgraph method returns a full AntiGraph. Not a View"""
        nodes = set(nodes)
        G = _AntiGraph()
        G.add_nodes_from(nodes)
        for n in G:
            Gnbrs = G.adjlist_inner_dict_factory()
            G._adj[n] = Gnbrs
            for nbr, d in self._adj[n].items():
                if nbr in G._adj:
                    Gnbrs[nbr] = d
                    G._adj[nbr][n] = d
        G.graph = self.graph
        return G

    class AntiDegreeView(nx.reportviews.DegreeView):
        def __iter__(self):
            all_nodes = set(self._succ)
            for n in self._nodes:
                nbrs = all_nodes - set(self._succ[n]) - {n}
                yield (n, len(nbrs))

        def __getitem__(self, n):
            nbrs = set(self._succ) - set(self._succ[n]) - {n}
            # AntiGraph is a ThinGraph so all edges have weight 1
            return len(nbrs) + (n in nbrs)

    @cached_property
    def degree(self):
        """Returns an iterator for (node, degree) and degree for single node.

        The node degree is the number of edges adjacent to the node.

        Parameters
        ----------
        nbunch : iterable container, optional (default=all nodes)
            A container of nodes. The container will be iterated
            through once.

        weight : string or None, optional (default=None)
            The edge attribute that holds the numerical value used
            as a weight. If None, then each edge has weight 1.
            The degree is the sum of the edge weights adjacent to the node.

        Returns
        -------
        deg:
            Degree of the node, if a single node is passed as argument.
        nd_iter : an iterator
            The iterator returns two-tuples of (node, degree).

        See Also
        --------
        degree

        Examples
        --------
        >>> G = nx.path_graph(4)
        >>> G.degree(0)  # node 0 with degree 1
        1
        >>> list(G.degree([0, 1]))
        [(0, 1), (1, 2)]

        """
        return self.AntiDegreeView(self)

    def adjacency(self):
        """Returns an iterator of (node, adjacency set) tuples for all nodes
        in the dense graph.

        This is the fastest way to look at every edge.
        For directed graphs, only outgoing adjacencies are included.

        Returns
        -------
        adj_iter : iterator
364
+ An iterator of (node, adjacency set) for all nodes in
365
+ the graph.
366
+
367
+ """
368
+ for n in self._adj:
369
+ yield (n, set(self._adj) - set(self._adj[n]) - {n})
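A minimal usage sketch (an editorial aside, not part of the committed file): the helpers above back the public `k_components` entry point in the approximation namespace, so they can be exercised as follows, assuming a standard networkx install.

import networkx as nx

G = nx.petersen_graph()                      # 3-connected test graph
k_comps = nx.approximation.k_components(G)   # dict: k -> list of node sets
print(sorted(k_comps))                       # expected levels 1, 2 and 3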
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py ADDED
@@ -0,0 +1,43 @@
1
+ """
2
+ **************
3
+ Graph Matching
4
+ **************
5
+
6
+ Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
7
+ edges; that is, no two edges share a common vertex.
8
+
9
+ `Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
10
+ """
11
+ import networkx as nx
12
+
13
+ __all__ = ["min_maximal_matching"]
14
+
15
+
16
+ @nx._dispatchable
17
+ def min_maximal_matching(G):
18
+ r"""Returns the minimum maximal matching of G. That is, out of all maximal
19
+ matchings of the graph G, the smallest is returned.
20
+
21
+ Parameters
22
+ ----------
23
+ G : NetworkX graph
24
+ Undirected graph
25
+
26
+ Returns
27
+ -------
28
+ min_maximal_matching : set
29
+ Returns a set of edges such that no two edges share a common endpoint
30
+ and every edge not in the set shares some common endpoint in the set.
31
+ Cardinality will be 2*OPT in the worst case.
32
+
33
+ Notes
34
+ -----
35
+ The algorithm computes an approximate solution for the minimum maximal
36
+ cardinality matching problem. The solution is no more than 2 * OPT in size.
37
+ Runtime is $O(|E|)$.
38
+
39
+ References
40
+ ----------
41
+ .. [1] Vazirani, Vijay Approximation Algorithms (2001)
42
+ """
43
+ return nx.maximal_matching(G)
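A minimal usage sketch (editorial, not part of the committed file) of the function above, assuming a standard networkx install:

import networkx as nx

G = nx.cycle_graph(6)
M = nx.approximation.min_maximal_matching(G)
# M is a set of edges forming a maximal matching; its size is at most 2 * OPT.
print(M)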
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py ADDED
@@ -0,0 +1,143 @@
1
+ import networkx as nx
2
+ from networkx.utils.decorators import not_implemented_for, py_random_state
3
+
4
+ __all__ = ["randomized_partitioning", "one_exchange"]
5
+
6
+
7
+ @not_implemented_for("directed")
8
+ @not_implemented_for("multigraph")
9
+ @py_random_state(1)
10
+ @nx._dispatchable(edge_attrs="weight")
11
+ def randomized_partitioning(G, seed=None, p=0.5, weight=None):
12
+ """Compute a random partitioning of the graph nodes and its cut value.
13
+
14
+ A partitioning is calculated by observing each node
15
+ and deciding to add it to the partition with probability `p`,
16
+ returning a random cut and its corresponding value (the
17
+ sum of weights of edges connecting different partitions).
18
+
19
+ Parameters
20
+ ----------
21
+ G : NetworkX graph
22
+
23
+ seed : integer, random_state, or None (default)
24
+ Indicator of random number generation state.
25
+ See :ref:`Randomness<randomness>`.
26
+
27
+ p : scalar
28
+ Probability for each node to be part of the first partition.
29
+ Should be in [0,1]
30
+
31
+ weight : object
32
+ Edge attribute key to use as weight. If not specified, edges
33
+ have weight one.
34
+
35
+ Returns
36
+ -------
37
+ cut_size : scalar
38
+ Value of the random cut.
39
+
40
+ partition : pair of node sets
41
+ A partitioning of the nodes that defines the random cut.
42
+
43
+ Examples
44
+ --------
45
+ >>> G = nx.complete_graph(5)
46
+ >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1)
47
+ >>> cut_size
48
+ 6
49
+ >>> partition
50
+ ({0, 3, 4}, {1, 2})
51
+
52
+ Raises
53
+ ------
54
+ NetworkXNotImplemented
55
+ If the graph is directed or is a multigraph.
56
+ """
57
+ cut = {node for node in G.nodes() if seed.random() < p}
58
+ cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
59
+ partition = (cut, G.nodes - cut)
60
+ return cut_size, partition
61
+
62
+
63
+ def _swap_node_partition(cut, node):
64
+ return cut - {node} if node in cut else cut.union({node})
65
+
66
+
67
+ @not_implemented_for("directed")
68
+ @not_implemented_for("multigraph")
69
+ @py_random_state(2)
70
+ @nx._dispatchable(edge_attrs="weight")
71
+ def one_exchange(G, initial_cut=None, seed=None, weight=None):
72
+ """Compute a partitioning of the graphs nodes and the corresponding cut value.
73
+
74
+ Use a greedy one-exchange strategy to find a locally maximal cut
76
+ and its value. It works by finding the best node (the one that gives
77
+ the highest gain to the cut value) to move into or out of the current cut,
78
+ and repeats this process until no improvement can be made.
78
+
79
+ Parameters
80
+ ----------
81
+ G : networkx Graph
82
+ Graph to find a maximum cut for.
83
+
84
+ initial_cut : set
85
+ Cut to use as a starting point. If not supplied the algorithm
86
+ starts with an empty cut.
87
+
88
+ seed : integer, random_state, or None (default)
89
+ Indicator of random number generation state.
90
+ See :ref:`Randomness<randomness>`.
91
+
92
+ weight : object
93
+ Edge attribute key to use as weight. If not specified, edges
94
+ have weight one.
95
+
96
+ Returns
97
+ -------
98
+ cut_value : scalar
99
+ Value of the maximum cut.
100
+
101
+ partition : pair of node sets
102
+ A partitioning of the nodes that defines a maximum cut.
103
+
104
+ Examples
105
+ --------
106
+ >>> G = nx.complete_graph(5)
107
+ >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1)
108
+ >>> curr_cut_size
109
+ 6
110
+ >>> partition
111
+ ({0, 2}, {1, 3, 4})
112
+
113
+ Raises
114
+ ------
115
+ NetworkXNotImplemented
116
+ If the graph is directed or is a multigraph.
117
+ """
118
+ if initial_cut is None:
119
+ initial_cut = set()
120
+ cut = set(initial_cut)
121
+ current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
122
+ while True:
123
+ nodes = list(G.nodes())
124
+ # Shuffling the nodes ensures random tie-breaks in the following call to max
125
+ seed.shuffle(nodes)
126
+ best_node_to_swap = max(
127
+ nodes,
128
+ key=lambda v: nx.algorithms.cut_size(
129
+ G, _swap_node_partition(cut, v), weight=weight
130
+ ),
131
+ default=None,
132
+ )
133
+ potential_cut = _swap_node_partition(cut, best_node_to_swap)
134
+ potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight)
135
+
136
+ if potential_cut_size > current_cut_size:
137
+ cut = potential_cut
138
+ current_cut_size = potential_cut_size
139
+ else:
140
+ break
141
+
142
+ partition = (cut, G.nodes - cut)
143
+ return current_cut_size, partition
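As a usage sketch (editorial, not part of the committed file): a random cut from `randomized_partitioning` can seed the greedy local search above through its `initial_cut` parameter; the graph and seed values below are arbitrary.

import networkx as nx

G = nx.gnp_random_graph(20, 0.3, seed=7)
_, (side_a, _) = nx.approximation.randomized_partitioning(G, seed=7)
cut_value, (left, right) = nx.approximation.one_exchange(G, initial_cut=side_a, seed=7)
print(cut_value)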
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py ADDED
@@ -0,0 +1,52 @@
1
+ """
2
+ Ramsey numbers.
3
+ """
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ from ...utils import arbitrary_element
8
+
9
+ __all__ = ["ramsey_R2"]
10
+
11
+
12
+ @not_implemented_for("directed")
13
+ @not_implemented_for("multigraph")
14
+ @nx._dispatchable
15
+ def ramsey_R2(G):
16
+ r"""Compute the largest clique and largest independent set in `G`.
17
+
18
+ This can be used to estimate bounds for the 2-color
19
+ Ramsey number `R(2;s,t)` for `G`.
20
+
21
+ This is a recursive implementation which could run into trouble
22
+ for large recursions. Note that self-loop edges are ignored.
23
+
24
+ Parameters
25
+ ----------
26
+ G : NetworkX graph
27
+ Undirected graph
28
+
29
+ Returns
30
+ -------
31
+ max_pair : (set, set) tuple
32
+ Maximum clique, Maximum independent set.
33
+
34
+ Raises
35
+ ------
36
+ NetworkXNotImplemented
37
+ If the graph is directed or is a multigraph.
38
+ """
39
+ if not G:
40
+ return set(), set()
41
+
42
+ node = arbitrary_element(G)
43
+ nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node)
44
+ nnbrs = nx.non_neighbors(G, node)
45
+ c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy())
46
+ c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy())
47
+
48
+ c_1.add(node)
49
+ i_2.add(node)
50
+ # Choose the larger of the two cliques and the larger of the two
51
+ # independent sets, according to cardinality.
52
+ return max(c_1, c_2, key=len), max(i_1, i_2, key=len)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/steinertree.py ADDED
@@ -0,0 +1,220 @@
1
+ from itertools import chain
2
+
3
+ import networkx as nx
4
+ from networkx.utils import not_implemented_for, pairwise
5
+
6
+ __all__ = ["metric_closure", "steiner_tree"]
7
+
8
+
9
+ @not_implemented_for("directed")
10
+ @nx._dispatchable(edge_attrs="weight", returns_graph=True)
11
+ def metric_closure(G, weight="weight"):
12
+ """Return the metric closure of a graph.
13
+
14
+ The metric closure of a graph *G* is the complete graph in which each edge
15
+ is weighted by the shortest path distance between the nodes in *G* .
16
+
17
+ Parameters
18
+ ----------
19
+ G : NetworkX graph
20
+
21
+ Returns
22
+ -------
23
+ NetworkX graph
24
+ Metric closure of the graph `G`.
25
+
26
+ """
27
+ M = nx.Graph()
28
+
29
+ Gnodes = set(G)
30
+
31
+ # check for connected graph while processing first node
32
+ all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight)
33
+ u, (distance, path) = next(all_paths_iter)
34
+ if Gnodes - set(distance):
35
+ msg = "G is not a connected graph. metric_closure is not defined."
36
+ raise nx.NetworkXError(msg)
37
+ Gnodes.remove(u)
38
+ for v in Gnodes:
39
+ M.add_edge(u, v, distance=distance[v], path=path[v])
40
+
41
+ # first node done -- now process the rest
42
+ for u, (distance, path) in all_paths_iter:
43
+ Gnodes.remove(u)
44
+ for v in Gnodes:
45
+ M.add_edge(u, v, distance=distance[v], path=path[v])
46
+
47
+ return M
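A small usage sketch (editorial, not part of the committed file): on a path graph the metric closure stores the shortest-path length and the path itself as edge attributes, as the code above shows.

import networkx as nx

G = nx.path_graph(4)                          # 0-1-2-3, unit edge weights
M = nx.approximation.metric_closure(G)
print(M[0][3]["distance"], M[0][3]["path"])   # 3 [0, 1, 2, 3]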
48
+
49
+
50
+ def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
51
+ paths = nx.multi_source_dijkstra_path(G, terminal_nodes)
52
+
53
+ d_1 = {}
54
+ s = {}
55
+ for v in G.nodes():
56
+ s[v] = paths[v][0]
57
+ d_1[(v, s[v])] = len(paths[v]) - 1
58
+
59
+ # G1-G4 names match those from the Mehlhorn 1988 paper.
60
+ G_1_prime = nx.Graph()
61
+ for u, v, data in G.edges(data=True):
62
+ su, sv = s[u], s[v]
63
+ weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
64
+ if not G_1_prime.has_edge(su, sv):
65
+ G_1_prime.add_edge(su, sv, weight=weight_here)
66
+ else:
67
+ new_weight = min(weight_here, G_1_prime[su][sv]["weight"])
68
+ G_1_prime.add_edge(su, sv, weight=new_weight)
69
+
70
+ G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)
71
+
72
+ G_3 = nx.Graph()
73
+ for u, v, d in G_2:
74
+ path = nx.shortest_path(G, u, v, weight)
75
+ for n1, n2 in pairwise(path):
76
+ G_3.add_edge(n1, n2)
77
+
78
+ G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
79
+ if G.is_multigraph():
80
+ G_3_mst = (
81
+ (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
82
+ )
83
+ G_4 = G.edge_subgraph(G_3_mst).copy()
84
+ _remove_nonterminal_leaves(G_4, terminal_nodes)
85
+ return G_4.edges()
86
+
87
+
88
+ def _kou_steiner_tree(G, terminal_nodes, weight):
89
+ # H is the subgraph induced by terminal_nodes in the metric closure M of G.
90
+ M = metric_closure(G, weight=weight)
91
+ H = M.subgraph(terminal_nodes)
92
+
93
+ # Use the 'distance' attribute of each edge provided by M.
94
+ mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)
95
+
96
+ # Create an iterator over each edge in each shortest path; repeats are okay
97
+ mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
98
+ if G.is_multigraph():
99
+ mst_all_edges = (
100
+ (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
101
+ for u, v in mst_all_edges
102
+ )
103
+
104
+ # Find the MST again, over this new set of edges
105
+ G_S = G.edge_subgraph(mst_all_edges)
106
+ T_S = nx.minimum_spanning_edges(G_S, weight="weight", data=False)
107
+
108
+ # Leaf nodes that are not terminal might still remain; remove them here
109
+ T_H = G.edge_subgraph(T_S).copy()
110
+ _remove_nonterminal_leaves(T_H, terminal_nodes)
111
+
112
+ return T_H.edges()
113
+
114
+
115
+ def _remove_nonterminal_leaves(G, terminals):
116
+ terminals_set = set(terminals)
117
+ for n in list(G.nodes):
118
+ if n not in terminals_set and G.degree(n) == 1:
119
+ G.remove_node(n)
120
+
121
+
122
+ ALGORITHMS = {
123
+ "kou": _kou_steiner_tree,
124
+ "mehlhorn": _mehlhorn_steiner_tree,
125
+ }
126
+
127
+
128
+ @not_implemented_for("directed")
129
+ @nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
130
+ def steiner_tree(G, terminal_nodes, weight="weight", method=None):
131
+ r"""Return an approximation to the minimum Steiner tree of a graph.
132
+
133
+ The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes` (also *S*)
134
+ is a tree within `G` that spans those nodes and has minimum size (sum of
135
+ edge weights) among all such trees.
136
+
137
+ The approximation algorithm is specified with the `method` keyword
138
+ argument. Both available algorithms produce a tree whose weight is
139
+ within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
140
+ where ``l`` is the minimum number of leaf nodes across all possible Steiner
141
+ trees.
142
+
143
+ * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
144
+ the subgraph of the metric closure of *G* induced by the terminal nodes,
145
+ where the metric closure of *G* is the complete graph in which each edge is
146
+ weighted by the shortest path distance between the nodes in *G*.
147
+
148
+ * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
149
+ algorithm, beginning by finding the closest terminal node for each
150
+ non-terminal. This data is used to create a complete graph containing only
151
+ the terminal nodes, in which each edge is weighted with the shortest path
152
+ distance between them. The algorithm then proceeds in the same way as Kou
153
+ et al.
154
+
155
+ Parameters
156
+ ----------
157
+ G : NetworkX graph
158
+
159
+ terminal_nodes : list
160
+ A list of terminal nodes for which the minimum Steiner tree is
161
+ to be found.
162
+
163
+ weight : string (default = 'weight')
164
+ Use the edge attribute specified by this string as the edge weight.
165
+ Any edge attribute not present defaults to 1.
166
+
167
+ method : string, optional (default = 'mehlhorn')
168
+ The algorithm to use to approximate the Steiner tree.
169
+ Supported options: 'kou', 'mehlhorn'.
170
+ Other inputs produce a ValueError.
171
+
172
+ Returns
173
+ -------
174
+ NetworkX graph
175
+ Approximation to the minimum steiner tree of `G` induced by
176
+ `terminal_nodes` .
177
+
178
+ Raises
179
+ ------
180
+ NetworkXNotImplemented
181
+ If `G` is directed.
182
+
183
+ ValueError
184
+ If the specified `method` is not supported.
185
+
186
+ Notes
187
+ -----
188
+ For multigraphs, the edge between two nodes with minimum weight is the
189
+ edge put into the Steiner tree.
190
+
191
+
192
+ References
193
+ ----------
194
+ .. [1] Steiner_tree_problem on Wikipedia.
195
+ https://en.wikipedia.org/wiki/Steiner_tree_problem
196
+ .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
197
+ ‘A Fast Algorithm for Steiner Trees’.
198
+ Acta Informatica 15 (2): 141–45.
199
+ https://doi.org/10.1007/BF00288961.
200
+ .. [3] Mehlhorn, Kurt. 1988.
201
+ ‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
202
+ Information Processing Letters 27 (3): 125–28.
203
+ https://doi.org/10.1016/0020-0190(88)90066-X.
204
+ """
205
+ if method is None:
206
+ method = "mehlhorn"
207
+
208
+ try:
209
+ algo = ALGORITHMS[method]
210
+ except KeyError as e:
211
+ raise ValueError(f"{method} is not a valid choice for an algorithm.") from e
212
+
213
+ edges = algo(G, terminal_nodes, weight)
214
+ # For multigraph we should add the minimal weight edge keys
215
+ if G.is_multigraph():
216
+ edges = (
217
+ (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges
218
+ )
219
+ T = G.edge_subgraph(edges)
220
+ return T
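A usage sketch (editorial, not part of the committed file) for `steiner_tree` on an unweighted grid, using the default Mehlhorn method; the terminal choice is arbitrary.

import networkx as nx

G = nx.grid_2d_graph(4, 4)                    # unit edge weights
terminals = [(0, 0), (3, 3), (0, 3)]
T = nx.approximation.steiner_tree(G, terminals, method="mehlhorn")
print(T.number_of_nodes(), T.number_of_edges())  # a tree spanning the terminals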
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (204 Bytes)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc ADDED
Binary file (4.41 kB)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-310.pyc ADDED
Binary file (5.84 kB)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-310.pyc ADDED
Binary file (9.63 kB)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc ADDED
Binary file (3.03 kB)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py ADDED
@@ -0,0 +1,1498 @@
1
+ """
2
+ =================================
3
+ Travelling Salesman Problem (TSP)
4
+ =================================
5
+
6
+ Implementation of approximate algorithms
7
+ for solving and approximating the TSP problem.
8
+
9
+ Categories of algorithms which are implemented:
10
+
11
+ - Christofides (provides a 3/2-approximation of TSP)
12
+ - Greedy
13
+ - Simulated Annealing (SA)
14
+ - Threshold Accepting (TA)
15
+ - Asadpour Asymmetric Traveling Salesman Algorithm
16
+
17
+ The Travelling Salesman Problem tries to find, given the weights
19
+ (distances) between all points a salesman has to visit, the
20
+ route so that:
20
+
21
+ - The total distance (cost) which the salesman travels is minimized.
22
+ - The salesman returns to the starting point.
23
+ - Note that for a complete graph, the salesman visits each point once.
24
+
25
+ The function `travelling_salesman_problem` allows for incomplete
26
+ graphs by finding all-pairs shortest paths, effectively converting
27
+ the problem to a complete graph problem. It calls one of the
28
+ approximate methods on that problem and then converts the result
29
+ back to the original graph using the previously found shortest paths.
30
+
31
+ TSP is an NP-hard problem in combinatorial optimization,
32
+ important in operations research and theoretical computer science.
33
+
34
+ http://en.wikipedia.org/wiki/Travelling_salesman_problem
35
+ """
36
+ import math
37
+
38
+ import networkx as nx
39
+ from networkx.algorithms.tree.mst import random_spanning_tree
40
+ from networkx.utils import not_implemented_for, pairwise, py_random_state
41
+
42
+ __all__ = [
43
+ "traveling_salesman_problem",
44
+ "christofides",
45
+ "asadpour_atsp",
46
+ "greedy_tsp",
47
+ "simulated_annealing_tsp",
48
+ "threshold_accepting_tsp",
49
+ ]
50
+
51
+
52
+ def swap_two_nodes(soln, seed):
53
+ """Swap two nodes in `soln` to give a neighbor solution.
54
+
55
+ Parameters
56
+ ----------
57
+ soln : list of nodes
58
+ Current cycle of nodes
59
+
60
+ seed : integer, random_state, or None (default)
61
+ Indicator of random number generation state.
62
+ See :ref:`Randomness<randomness>`.
63
+
64
+ Returns
65
+ -------
66
+ list
67
+ The solution after move is applied. (A neighbor solution.)
68
+
69
+ Notes
70
+ -----
71
+ This function assumes that the incoming list `soln` is a cycle
72
+ (that the first and last element are the same) and also that
73
+ we don't want any move to change the first node in the list
74
+ (and thus not the last node either).
75
+
76
+ The input list is changed as well as returned. Make a copy if needed.
77
+
78
+ See Also
79
+ --------
80
+ move_one_node
81
+ """
82
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
83
+ soln[a], soln[b] = soln[b], soln[a]
84
+ return soln
85
+
86
+
87
+ def move_one_node(soln, seed):
88
+ """Move one node to another position to give a neighbor solution.
89
+
90
+ The node to move and the position to move to are chosen randomly.
91
+ The first and last nodes are left untouched as soln must be a cycle
92
+ starting at that node.
93
+
94
+ Parameters
95
+ ----------
96
+ soln : list of nodes
97
+ Current cycle of nodes
98
+
99
+ seed : integer, random_state, or None (default)
100
+ Indicator of random number generation state.
101
+ See :ref:`Randomness<randomness>`.
102
+
103
+ Returns
104
+ -------
105
+ list
106
+ The solution after move is applied. (A neighbor solution.)
107
+
108
+ Notes
109
+ -----
110
+ This function assumes that the incoming list `soln` is a cycle
111
+ (that the first and last element are the same) and also that
112
+ we don't want any move to change the first node in the list
113
+ (and thus not the last node either).
114
+
115
+ The input list is changed as well as returned. Make a copy if needed.
116
+
117
+ See Also
118
+ --------
119
+ swap_two_nodes
120
+ """
121
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
122
+ soln.insert(b, soln.pop(a))
123
+ return soln
124
+
125
+
126
+ @not_implemented_for("directed")
127
+ @nx._dispatchable(edge_attrs="weight")
128
+ def christofides(G, weight="weight", tree=None):
129
+ """Approximate a solution of the traveling salesman problem
130
+
131
+ Compute a 3/2-approximation of the traveling salesman problem
132
+ in a complete undirected graph using Christofides [1]_ algorithm.
133
+
134
+ Parameters
135
+ ----------
136
+ G : Graph
137
+ `G` should be a complete weighted undirected graph.
138
+ The distance between all pairs of nodes should be included.
139
+
140
+ weight : string, optional (default="weight")
141
+ Edge data key corresponding to the edge weight.
142
+ If any edge does not have this attribute the weight is set to 1.
143
+
144
+ tree : NetworkX graph or None (default: None)
145
+ A minimum spanning tree of G. Or, if None, the minimum spanning
146
+ tree is computed using :func:`networkx.minimum_spanning_tree`
147
+
148
+ Returns
149
+ -------
150
+ list
151
+ List of nodes in `G` along a cycle with a 3/2-approximation of
152
+ the minimal Hamiltonian cycle.
153
+
154
+ References
155
+ ----------
156
+ .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
157
+ the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
158
+ Pittsburgh Pa Management Sciences Research Group, 1976.
159
+ """
160
+ # Remove selfloops if necessary
161
+ loop_nodes = nx.nodes_with_selfloops(G)
162
+ try:
163
+ node = next(loop_nodes)
164
+ except StopIteration:
165
+ pass
166
+ else:
167
+ G = G.copy()
168
+ G.remove_edge(node, node)
169
+ G.remove_edges_from((n, n) for n in loop_nodes)
170
+ # Check that G is a complete graph
171
+ N = len(G) - 1
172
+ # This check ignores selfloops which is what we want here.
173
+ if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
174
+ raise nx.NetworkXError("G must be a complete graph.")
175
+
176
+ if tree is None:
177
+ tree = nx.minimum_spanning_tree(G, weight=weight)
178
+ L = G.copy()
179
+ L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
180
+ MG = nx.MultiGraph()
181
+ MG.add_edges_from(tree.edges)
182
+ edges = nx.min_weight_matching(L, weight=weight)
183
+ MG.add_edges_from(edges)
184
+ return _shortcutting(nx.eulerian_circuit(MG))
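A usage sketch (editorial, not part of the committed file): `christofides` expects a complete weighted graph and returns a closed tour as a node list; the weights below are arbitrary but symmetric.

import networkx as nx

G = nx.complete_graph(5)
for u, v in G.edges:
    G[u][v]["weight"] = abs(u - v)
tour = nx.approximation.christofides(G)
print(tour[0] == tour[-1], len(tour))         # True 6 (each node once, plus the return)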
185
+
186
+
187
+ def _shortcutting(circuit):
188
+ """Remove duplicate nodes in the path"""
189
+ nodes = []
190
+ for u, v in circuit:
191
+ if v in nodes:
192
+ continue
193
+ if not nodes:
194
+ nodes.append(u)
195
+ nodes.append(v)
196
+ nodes.append(nodes[0])
197
+ return nodes
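A worked example (editorial, not part of the committed file) of the shortcutting step above: repeated visits in an Eulerian circuit are skipped, so each node appears once before the tour is closed.

from networkx.algorithms.approximation.traveling_salesman import _shortcutting

circuit = [(0, 1), (1, 2), (2, 1), (1, 3), (3, 0)]
print(_shortcutting(circuit))                 # [0, 1, 2, 3, 0]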
198
+
199
+
200
+ @nx._dispatchable(edge_attrs="weight")
201
+ def traveling_salesman_problem(
202
+ G, weight="weight", nodes=None, cycle=True, method=None, **kwargs
203
+ ):
204
+ """Find the shortest path in `G` connecting specified nodes
205
+
206
+ This function allows an approximate solution to the traveling salesman
207
+ problem on networks that are not complete graphs and/or where the
208
+ salesman does not need to visit all nodes.
209
+
210
+ This function proceeds in two steps. First, it creates a complete
211
+ graph using the all-pairs shortest_paths between nodes in `nodes`.
212
+ Edge weights in the new graph are the lengths of the paths
213
+ between each pair of nodes in the original graph.
214
+ Second, an algorithm (default: `christofides` for undirected and
215
+ `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
216
+ cycle on this new graph. The available algorithms are:
217
+
218
+ - christofides
219
+ - greedy_tsp
220
+ - simulated_annealing_tsp
221
+ - threshold_accepting_tsp
222
+ - asadpour_atsp
223
+
224
+ Once the Hamiltonian Cycle is found, this function post-processes to
225
+ accommodate the structure of the original graph. If `cycle` is ``False``,
226
+ the biggest weight edge is removed to make a Hamiltonian path.
227
+ Then each edge on the new complete graph used for that analysis is
228
+ replaced by the shortest_path between those nodes on the original graph.
229
+ If the input graph `G` includes edges with weights that do not adhere to
230
+ the triangle inequality, such as when `G` is not a complete graph (i.e.
231
+ length of non-existent edges is infinity), then the returned path may
232
+ contain some repeating nodes (other than the starting node).
233
+
234
+ Parameters
235
+ ----------
236
+ G : NetworkX graph
237
+ A possibly weighted graph
238
+
239
+ nodes : collection of nodes (default=G.nodes)
240
+ collection (list, set, etc.) of nodes to visit
241
+
242
+ weight : string, optional (default="weight")
243
+ Edge data key corresponding to the edge weight.
244
+ If any edge does not have this attribute the weight is set to 1.
245
+
246
+ cycle : bool (default: True)
247
+ Indicates whether a cycle should be returned, or a path.
248
+ Note: the cycle is the approximate minimal cycle.
249
+ The path simply removes the biggest edge in that cycle.
250
+
251
+ method : function (default: None)
252
+ A function that returns a cycle on all nodes and approximates
253
+ the solution to the traveling salesman problem on a complete
254
+ graph. The returned cycle is then used to find a corresponding
255
+ solution on `G`. `method` should be callable; take inputs
256
+ `G`, and `weight`; and return a list of nodes along the cycle.
257
+
258
+ Provided options include :func:`christofides`, :func:`greedy_tsp`,
259
+ :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`.
260
+
261
+ If `method is None`: use :func:`christofides` for undirected `G` and
262
+ :func:`asadpour_atsp` for directed `G`.
263
+
264
+ **kwargs : dict
265
+ Other keyword arguments to be passed to the `method` function passed in.
266
+
267
+ Returns
268
+ -------
269
+ list
270
+ List of nodes in `G` along a path with an approximation of the minimal
271
+ path through `nodes`.
272
+
273
+ Raises
274
+ ------
275
+ NetworkXError
276
+ If `G` is a directed graph it has to be strongly connected or the
277
+ complete version cannot be generated.
278
+
279
+ Examples
280
+ --------
281
+ >>> tsp = nx.approximation.traveling_salesman_problem
282
+ >>> G = nx.cycle_graph(9)
283
+ >>> G[4][5]["weight"] = 5 # all other weights are 1
284
+ >>> tsp(G, nodes=[3, 6])
285
+ [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
286
+ >>> path = tsp(G, cycle=False)
287
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
288
+ True
289
+
290
+ While no longer required, you can still build (curry) your own function
291
+ to provide parameter values to the methods.
292
+
293
+ >>> SA_tsp = nx.approximation.simulated_annealing_tsp
294
+ >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500)
295
+ >>> path = tsp(G, cycle=False, method=method)
296
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
297
+ True
298
+
299
+ Otherwise, pass other keyword arguments directly into the tsp function.
300
+
301
+ >>> path = tsp(
302
+ ... G,
303
+ ... cycle=False,
304
+ ... method=nx.approximation.simulated_annealing_tsp,
305
+ ... init_cycle="greedy",
306
+ ... temp=500,
307
+ ... )
308
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
309
+ True
310
+ """
311
+ if method is None:
312
+ if G.is_directed():
313
+ method = asadpour_atsp
314
+ else:
315
+ method = christofides
316
+ if nodes is None:
317
+ nodes = list(G.nodes)
318
+
319
+ dist = {}
320
+ path = {}
321
+ for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
322
+ dist[n] = d
323
+ path[n] = p
324
+
325
+ if G.is_directed():
326
+ # If the graph is not strongly connected, raise an exception
327
+ if not nx.is_strongly_connected(G):
328
+ raise nx.NetworkXError("G is not strongly connected")
329
+ GG = nx.DiGraph()
330
+ else:
331
+ GG = nx.Graph()
332
+ for u in nodes:
333
+ for v in nodes:
334
+ if u == v:
335
+ continue
336
+ GG.add_edge(u, v, weight=dist[u][v])
337
+
338
+ best_GG = method(GG, weight=weight, **kwargs)
339
+
340
+ if not cycle:
341
+ # find and remove the biggest edge
342
+ (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
343
+ pos = best_GG.index(u) + 1
344
+ while best_GG[pos] != v:
345
+ pos = best_GG[pos:].index(u) + 1
346
+ best_GG = best_GG[pos:-1] + best_GG[:pos]
347
+
348
+ best_path = []
349
+ for u, v in pairwise(best_GG):
350
+ best_path.extend(path[u][v][:-1])
351
+ best_path.append(v)
352
+ return best_path
353
+
354
+
355
+ @not_implemented_for("undirected")
356
+ @py_random_state(2)
357
+ @nx._dispatchable(edge_attrs="weight", mutates_input=True)
358
+ def asadpour_atsp(G, weight="weight", seed=None, source=None):
359
+ """
360
+ Returns an approximate solution to the traveling salesman problem.
361
+
362
+ This approximate solution is one of the best known approximations for the
363
+ asymmetric traveling salesman problem developed by Asadpour et al,
364
+ [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
365
+ bound for the weight of the cycle. Next, it constructs an exponential
366
+ distribution of undirected spanning trees where the probability of an
367
+ edge being in the tree corresponds to the weight of that edge using a
368
+ maximum entropy rounding scheme. Next we sample that distribution
369
+ $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
370
+ direction of the arcs is added back to the edges. Finally, we augment
371
+ then short circuit that graph to find the approximate tour for the
372
+ salesman.
373
+
374
+ Parameters
375
+ ----------
376
+ G : nx.DiGraph
377
+ The graph should be a complete weighted directed graph. The
378
+ distance between all pairs of nodes should be included and the triangle
379
+ inequality should hold. That is, the direct edge between any two nodes
380
+ should be the path of least cost.
381
+
382
+ weight : string, optional (default="weight")
383
+ Edge data key corresponding to the edge weight.
384
+ If any edge does not have this attribute the weight is set to 1.
385
+
386
+ seed : integer, random_state, or None (default)
387
+ Indicator of random number generation state.
388
+ See :ref:`Randomness<randomness>`.
389
+
390
+ source : node label (default=`None`)
391
+ If given, return the cycle starting and ending at the given node.
392
+
393
+ Returns
394
+ -------
395
+ cycle : list of nodes
396
+ Returns the cycle (list of nodes) that a salesman can follow to minimize
397
+ the total weight of the trip.
398
+
399
+ Raises
400
+ ------
401
+ NetworkXError
402
+ If `G` is not complete or has less than two nodes, the algorithm raises
403
+ an exception.
404
+
405
+ NetworkXError
406
+ If `source` is not `None` and is not a node in `G`, the algorithm raises
407
+ an exception.
408
+
409
+ NetworkXNotImplemented
410
+ If `G` is an undirected graph.
411
+
412
+ References
413
+ ----------
414
+ .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
415
+ An o(log n/log log n)-approximation algorithm for the asymmetric
416
+ traveling salesman problem, Operations research, 65 (2017),
417
+ pp. 1043–1061
418
+
419
+ Examples
420
+ --------
421
+ >>> import networkx as nx
422
+ >>> import networkx.algorithms.approximation as approx
423
+ >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
424
+ >>> nx.set_edge_attributes(
425
+ ... G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight"
426
+ ... )
427
+ >>> tour = approx.asadpour_atsp(G, source=0)
428
+ >>> tour
429
+ [0, 2, 1, 0]
430
+ """
431
+ from math import ceil, exp
432
+ from math import log as ln
433
+
434
+ # Check that G is a complete graph
435
+ N = len(G) - 1
436
+ if N < 2:
437
+ raise nx.NetworkXError("G must have at least two nodes")
438
+ # This check ignores selfloops which is what we want here.
439
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
440
+ raise nx.NetworkXError("G is not a complete DiGraph")
441
+ # Check that the source vertex, if given, is in the graph
442
+ if source is not None and source not in G.nodes:
443
+ raise nx.NetworkXError("Given source node not in G.")
444
+
445
+ opt_hk, z_star = held_karp_ascent(G, weight)
446
+
447
+ # Test to see if the ascent method found an integer solution or a fractional
448
+ # solution. If it is integral then z_star is a nx.Graph, otherwise it is
449
+ # a dict
450
+ if not isinstance(z_star, dict):
451
+ # Here we are using the shortcutting method to go from the list of edges
452
+ # returned from eulerian_circuit to a list of nodes
453
+ return _shortcutting(nx.eulerian_circuit(z_star, source=source))
454
+
455
+ # Create the undirected support of z_star
456
+ z_support = nx.MultiGraph()
457
+ for u, v in z_star:
458
+ if (u, v) not in z_support.edges:
459
+ edge_weight = min(G[u][v][weight], G[v][u][weight])
460
+ z_support.add_edge(u, v, **{weight: edge_weight})
461
+
462
+ # Create the exponential distribution of spanning trees
463
+ gamma = spanning_tree_distribution(z_support, z_star)
464
+
465
+ # Write the lambda values to the edges of z_support
466
+ z_support = nx.Graph(z_support)
467
+ lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
468
+ nx.set_edge_attributes(z_support, lambda_dict, "weight")
469
+ del gamma, lambda_dict
470
+
471
+ # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
472
+ minimum_sampled_tree = None
473
+ minimum_sampled_tree_weight = math.inf
474
+ for _ in range(2 * ceil(ln(G.number_of_nodes()))):
475
+ sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
476
+ sampled_tree_weight = sampled_tree.size(weight)
477
+ if sampled_tree_weight < minimum_sampled_tree_weight:
478
+ minimum_sampled_tree = sampled_tree.copy()
479
+ minimum_sampled_tree_weight = sampled_tree_weight
480
+
481
+ # Orient the edges in that tree to keep the cost of the tree the same.
482
+ t_star = nx.MultiDiGraph()
483
+ for u, v, d in minimum_sampled_tree.edges(data=weight):
484
+ if d == G[u][v][weight]:
485
+ t_star.add_edge(u, v, **{weight: d})
486
+ else:
487
+ t_star.add_edge(v, u, **{weight: d})
488
+
489
+ # Find the node demands needed to neutralize the flow of t_star in G
490
+ node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
491
+ nx.set_node_attributes(G, node_demands, "demand")
492
+
493
+ # Find the min_cost_flow
494
+ flow_dict = nx.min_cost_flow(G, "demand")
495
+
496
+ # Build the flow into t_star
497
+ for source, values in flow_dict.items():
498
+ for target in values:
499
+ if (source, target) not in t_star.edges and values[target] > 0:
500
+ # IF values[target] > 0 we have to add that many edges
501
+ for _ in range(values[target]):
502
+ t_star.add_edge(source, target)
503
+
504
+ # Return the shortcut eulerian circuit
505
+ circuit = nx.eulerian_circuit(t_star, source=source)
506
+ return _shortcutting(circuit)
507
+
508
+
509
+ @nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
510
+ def held_karp_ascent(G, weight="weight"):
511
+ """
512
+ Minimizes the Held-Karp relaxation of the TSP for `G`
513
+
514
+ Solves the Held-Karp relaxation of the input complete digraph and scales
515
+ the output solution for use in the Asadpour [1]_ ATSP algorithm.
516
+
517
+ The Held-Karp relaxation defines the lower bound for solutions to the
518
+ ATSP, although it may return a fractional solution. This is used in the
519
+ Asadpour algorithm as an initial solution which is later rounded to an
520
+ integral tree within the spanning tree polytope. This function solves
521
+ the relaxation with the branch and bound method in [2]_.
522
+
523
+ Parameters
524
+ ----------
525
+ G : nx.DiGraph
526
+ The graph should be a complete weighted directed graph.
527
+ The distance between all pairs of nodes should be included.
528
+
529
+ weight : string, optional (default="weight")
530
+ Edge data key corresponding to the edge weight.
531
+ If any edge does not have this attribute the weight is set to 1.
532
+
533
+ Returns
534
+ -------
535
+ OPT : float
536
+ The cost for the optimal solution to the Held-Karp relaxation
537
+ z : dict or nx.Graph
538
+ A symmetrized and scaled version of the optimal solution to the
539
+ Held-Karp relaxation for use in the Asadpour algorithm.
540
+
541
+ If an integral solution is found, then that is an optimal solution for
542
+ the ATSP problem and that is returned instead.
543
+
544
+ References
545
+ ----------
546
+ .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
547
+ An o(log n/log log n)-approximation algorithm for the asymmetric
548
+ traveling salesman problem, Operations research, 65 (2017),
549
+ pp. 1043–1061
550
+
551
+ .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
552
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
553
+ pp.1138-1162
554
+ """
555
+ import numpy as np
556
+ from scipy import optimize
557
+
558
+ def k_pi():
559
+ """
560
+ Find the set of minimum 1-Arborescences for G at point pi.
561
+
562
+ Returns
563
+ -------
564
+ Set
565
+ The set of minimum 1-Arborescences
566
+ """
567
+ # Create a copy of G without vertex 1.
568
+ G_1 = G.copy()
569
+ minimum_1_arborescences = set()
570
+ minimum_1_arborescence_weight = math.inf
571
+
572
+ # node is node '1' in the Held and Karp paper
573
+ n = next(G.__iter__())
574
+ G_1.remove_node(n)
575
+
576
+ # Iterate over the spanning arborescences of the graph until we know
577
+ # that we have found the minimum 1-arborescences. My proposed strategy
578
+ # is to find the most extensive root to connect to from 'node 1' and
579
+ # the least expensive one. We then iterate over arborescences until
580
+ # the cost of the basic arborescence is the cost of the minimum one
581
+ # plus the difference between the most and least expensive roots,
582
+ # that way the cost of connecting 'node 1' will by definition not by
583
+ # minimum
584
+ min_root = {"node": None, weight: math.inf}
585
+ max_root = {"node": None, weight: -math.inf}
586
+ for u, v, d in G.edges(n, data=True):
587
+ if d[weight] < min_root[weight]:
588
+ min_root = {"node": v, weight: d[weight]}
589
+ if d[weight] > max_root[weight]:
590
+ max_root = {"node": v, weight: d[weight]}
591
+
592
+ min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
593
+ min_root[weight] = min_root[weight] + min_in_edge[2][weight]
594
+ max_root[weight] = max_root[weight] + min_in_edge[2][weight]
595
+
596
+ min_arb_weight = math.inf
597
+ for arb in nx.ArborescenceIterator(G_1):
598
+ arb_weight = arb.size(weight)
599
+ if min_arb_weight == math.inf:
600
+ min_arb_weight = arb_weight
601
+ elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
602
+ break
603
+ # We have to pick the root node of the arborescence for the out
604
+ # edge of the first vertex as that is the only node without an
605
+ # edge directed into it.
606
+ for N, deg in arb.in_degree:
607
+ if deg == 0:
608
+ # root found
609
+ arb.add_edge(n, N, **{weight: G[n][N][weight]})
610
+ arb_weight += G[n][N][weight]
611
+ break
612
+
613
+ # We can pick the minimum weight in-edge for the vertex with
614
+ # a cycle. If there are multiple edges with the same minimum
615
+ # weight, we need to add all of them.
616
+ #
617
+ # Delete the edge (N, n) so that we cannot pick it.
618
+ edge_data = G[N][n]
619
+ G.remove_edge(N, n)
620
+ min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
621
+ min_edges = [
622
+ (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
623
+ ]
624
+ for u, v, d in min_edges:
625
+ new_arb = arb.copy()
626
+ new_arb.add_edge(u, v, **{weight: d})
627
+ new_arb_weight = arb_weight + d
628
+ # Check the weight of the arborescence; if it is a
629
+ # new minimum, clear all of the old potential minimum
630
+ # 1-arborescences and add this as the only one. If its
631
+ # weight is above the known minimum, do not add it.
632
+ if new_arb_weight < minimum_1_arborescence_weight:
633
+ minimum_1_arborescences.clear()
634
+ minimum_1_arborescence_weight = new_arb_weight
635
+ # We have a 1-arborescence, add it to the set
636
+ if new_arb_weight == minimum_1_arborescence_weight:
637
+ minimum_1_arborescences.add(new_arb)
638
+ G.add_edge(N, n, **edge_data)
639
+
640
+ return minimum_1_arborescences
641
+
642
+ def direction_of_ascent():
643
+ """
644
+ Find the direction of ascent at point pi.
645
+
646
+ See [1]_ for more information.
647
+
648
+ Returns
649
+ -------
650
+ dict
651
+ A mapping from the nodes of the graph which represents the direction
652
+ of ascent.
653
+
654
+ References
655
+ ----------
656
+ .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
657
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
658
+ pp.1138-1162
659
+ """
660
+ # 1. Set d equal to the zero n-vector.
661
+ d = {}
662
+ for n in G:
663
+ d[n] = 0
664
+ del n
665
+ # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
666
+ minimum_1_arborescences = k_pi()
667
+ while True:
668
+ # Reduce K(pi) to K(pi, d)
669
+ # Find the arborescence in K(pi) which increases the least in
670
+ # direction d
671
+ min_k_d_weight = math.inf
672
+ min_k_d = None
673
+ for arborescence in minimum_1_arborescences:
674
+ weighted_cost = 0
675
+ for n, deg in arborescence.degree:
676
+ weighted_cost += d[n] * (deg - 2)
677
+ if weighted_cost < min_k_d_weight:
678
+ min_k_d_weight = weighted_cost
679
+ min_k_d = arborescence
680
+
681
+ # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
682
+ if min_k_d_weight > 0:
683
+ return d, min_k_d
684
+ # 4. d_i = d_i + v_{i, k}
685
+ for n, deg in min_k_d.degree:
686
+ d[n] += deg - 2
687
+ # Check that we do not need to terminate because the direction
688
+ # of ascent does not exist. This is done with linear
689
+ # programming.
690
+ c = np.full(len(minimum_1_arborescences), -1, dtype=int)
691
+ a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
692
+ b_eq = np.zeros(len(G) + 1, dtype=int)
693
+ b_eq[len(G)] = 1
694
+ for arb_count, arborescence in enumerate(minimum_1_arborescences):
695
+ n_count = len(G) - 1
696
+ for n, deg in arborescence.degree:
697
+ a_eq[n_count][arb_count] = deg - 2
698
+ n_count -= 1
699
+ a_eq[len(G)][arb_count] = 1
700
+ program_result = optimize.linprog(
701
+ c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
702
+ )
703
+ # If the LP is feasible, then the direction of ascent doesn't exist
704
+ if program_result.success:
705
+ # There is no direction of ascent
706
+ return None, minimum_1_arborescences
707
+
708
+ # 5. GO TO 2
709
+
710
+ def find_epsilon(k, d):
711
+ """
712
+ Given the direction of ascent at pi, find the maximum distance we can go
713
+ in that direction.
714
+
715
+ Parameters
716
+ ----------
717
+ k : networkx graph
718
+ The 1-arborescence with the minimum rate of increase
719
+ in the direction of ascent
720
+
721
+ d : dict
722
+ The direction of ascent
723
+
724
+ Returns
725
+ -------
726
+ float
727
+ The distance we can travel in direction `d`
728
+ """
729
+ min_epsilon = math.inf
730
+ for e_u, e_v, e_w in G.edges(data=weight):
731
+ if (e_u, e_v) in k.edges:
732
+ continue
733
+ # Now, I have found a condition which MUST be true for the edges to
734
+ # be a valid substitute. The edge in the graph which is the
735
+ # substitute is the one with the same terminated end. This can be
736
+ # checked rather simply.
737
+ #
738
+ # Find the edge within k which is the substitute. Because k is a
739
+ # 1-arborescence, we know that there is only one such edge
740
+ # leading into every vertex.
741
+ if len(k.in_edges(e_v, data=weight)) > 1:
742
+ raise Exception
743
+ sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
744
+ k.add_edge(e_u, e_v, **{weight: e_w})
745
+ k.remove_edge(sub_u, sub_v)
746
+ if (
747
+ max(d for n, d in k.in_degree()) <= 1
748
+ and len(G) == k.number_of_edges()
749
+ and nx.is_weakly_connected(k)
750
+ ):
751
+ # Ascent method calculation
752
+ if d[sub_u] == d[e_u] or sub_w == e_w:
753
+ # Revert to the original graph
754
+ k.remove_edge(e_u, e_v)
755
+ k.add_edge(sub_u, sub_v, **{weight: sub_w})
756
+ continue
757
+ epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
758
+ if 0 < epsilon < min_epsilon:
759
+ min_epsilon = epsilon
760
+ # Revert to the original graph
761
+ k.remove_edge(e_u, e_v)
762
+ k.add_edge(sub_u, sub_v, **{weight: sub_w})
763
+
764
+ return min_epsilon
765
+
766
+ # I have to know that the elements in pi correspond to the correct elements
767
+ # in the direction of ascent, even if the node labels are not integers.
768
+ # Thus, I will use dictionaries to make that mapping.
769
+ pi_dict = {}
770
+ for n in G:
771
+ pi_dict[n] = 0
772
+ del n
773
+ original_edge_weights = {}
774
+ for u, v, d in G.edges(data=True):
775
+ original_edge_weights[(u, v)] = d[weight]
776
+ dir_ascent, k_d = direction_of_ascent()
777
+ while dir_ascent is not None:
778
+ max_distance = find_epsilon(k_d, dir_ascent)
779
+ for n, v in dir_ascent.items():
780
+ pi_dict[n] += max_distance * v
781
+ for u, v, d in G.edges(data=True):
782
+ d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
783
+ dir_ascent, k_d = direction_of_ascent()
784
+ nx._clear_cache(G)
785
+ # k_d is no longer an individual 1-arborescence but rather a set of
786
+ # minimal 1-arborescences at the maximum point of the polytope and should
787
+ # be reflected as such
788
+ k_max = k_d
789
+
790
+ # Search for a cycle within k_max. If a cycle exists, return it as the
791
+ # solution
792
+ for k in k_max:
793
+ if len([n for n in k if k.degree(n) == 2]) == G.order():
794
+ # Tour found
795
+ # TODO: this branch does not restore original_edge_weights of G!
796
+ return k.size(weight), k
797
+
798
+ # Write the original edge weights back to G and every member of k_max at
799
+ # the maximum point. Also average the number of times that edge appears in
800
+ # the set of minimal 1-arborescences.
801
+ x_star = {}
802
+ size_k_max = len(k_max)
803
+ for u, v, d in G.edges(data=True):
804
+ edge_count = 0
805
+ d[weight] = original_edge_weights[(u, v)]
806
+ for k in k_max:
807
+ if (u, v) in k.edges():
808
+ edge_count += 1
809
+ k[u][v][weight] = original_edge_weights[(u, v)]
810
+ x_star[(u, v)] = edge_count / size_k_max
811
+ # Now symmetrize the edges in x_star and scale them according to (5) in
812
+ # reference [1]
813
+ z_star = {}
814
+ scale_factor = (G.order() - 1) / G.order()
815
+ for u, v in x_star:
816
+ frequency = x_star[(u, v)] + x_star[(v, u)]
817
+ if frequency > 0:
818
+ z_star[(u, v)] = scale_factor * frequency
819
+ del x_star
820
+ # Return the optimal weight and the z dict
821
+ return next(k_max.__iter__()).size(weight), z_star
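A usage sketch (editorial, not part of the committed file): `held_karp_ascent` can be called directly on a small complete DiGraph, reusing the weights from the `asadpour_atsp` docstring example; it needs numpy and scipy and may temporarily rewrite the input's edge weights.

import networkx as nx
from networkx.algorithms.approximation.traveling_salesman import held_karp_ascent

G = nx.complete_graph(3, create_using=nx.DiGraph)
nx.set_edge_attributes(
    G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight"
)
opt_hk, z_star = held_karp_ascent(G)
# z_star is an nx.Graph when an integral tour is found, otherwise a dict of
# symmetrized, scaled edge frequencies as described above.
print(opt_hk)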
822
+
823
+
824
+ @nx._dispatchable
825
+ def spanning_tree_distribution(G, z):
826
+ """
827
+ Find the asadpour exponential distribution of spanning trees.
828
+
829
+ Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
830
+ using the approach in section 7 to build an exponential distribution of
831
+ undirected spanning trees.
832
+
833
+ This algorithm ensures that the probability of any edge in a spanning
834
+ tree is proportional to the sum of the probabilities of the trees
835
+ containing that edge over the sum of the probabilities of all spanning
836
+ trees of the graph.
837
+
838
+ Parameters
839
+ ----------
840
+ G : nx.MultiGraph
841
+ The undirected support graph for the Held Karp relaxation
842
+
843
+ z : dict
844
+ The output of `held_karp_ascent()`, a scaled version of the Held-Karp
845
+ solution.
846
+
847
+ Returns
848
+ -------
849
+ gamma : dict
850
+ The probability distribution which approximately preserves the marginal
851
+ probabilities of `z`.
852
+ """
853
+ from math import exp
854
+ from math import log as ln
855
+
856
+ def q(e):
857
+ """
858
+ The value of q(e), as described in the Asadpour paper, is "the
859
+ probability that edge e will be included in a spanning tree T that is
860
+ chosen with probability proportional to exp(gamma(T))" which
861
+ basically means that it is the total probability of the edge appearing
862
+ across the whole distribution.
863
+
864
+ Parameters
865
+ ----------
866
+ e : tuple
867
+ The `(u, v)` tuple describing the edge we are interested in
868
+
869
+ Returns
870
+ -------
871
+ float
872
+ The probability that a spanning tree chosen according to the
873
+ current values of gamma will include edge `e`.
874
+ """
875
+ # Create the laplacian matrices
876
+ for u, v, d in G.edges(data=True):
877
+ d[lambda_key] = exp(gamma[(u, v)])
878
+ G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key)
879
+ G_e = nx.contracted_edge(G, e, self_loops=False)
880
+ G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key)
881
+
882
+ # Multiply by the weight of the contracted edge since it is not included
883
+ # in the total weight of the contracted graph.
884
+ return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff
885
+
886
+ # initialize gamma to the zero dict
887
+ gamma = {}
888
+ for u, v, _ in G.edges:
889
+ gamma[(u, v)] = 0
890
+
891
+ # set epsilon
892
+ EPSILON = 0.2
893
+
894
+ # pick an edge attribute name that is unlikely to be in the graph
895
+ lambda_key = "spanning_tree_distribution's secret attribute name for lambda"
896
+
897
+ while True:
898
+ # We need to know that no value of q_e is greater than
+ # (1 + epsilon) * z_e. However, changing one gamma value can increase the
+ # value of a different q_e, so we have to complete the for loop without
+ # changing anything for the condition to be met
902
+ in_range_count = 0
903
+ # Search for an edge with q_e > (1 + epsilon) * z_e
904
+ for u, v in gamma:
905
+ e = (u, v)
906
+ q_e = q(e)
907
+ z_e = z[e]
908
+ if q_e > (1 + EPSILON) * z_e:
909
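+ # Editor's note: holding the other gamma values fixed, q_e is a
+ # logistic function of gamma_e, i.e. q_e = w_e * A / (w_e * A + B)
+ # with w_e = exp(gamma_e) and A, B independent of gamma_e. Solving that
+ # relation for the decrease that makes the new q_e equal to
+ # (1 + EPSILON / 2) * z_e gives the closed-form delta below, which the
+ # subsequent check verifies.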
+ delta = ln(
910
+ (q_e * (1 - (1 + EPSILON / 2) * z_e))
911
+ / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
912
+ )
913
+ gamma[e] -= delta
914
+ # Check that delta had the desired effect
915
+ new_q_e = q(e)
916
+ desired_q_e = (1 + EPSILON / 2) * z_e
917
+ if round(new_q_e, 8) != round(desired_q_e, 8):
918
+ raise nx.NetworkXError(
919
+ f"Unable to modify probability for edge ({u}, {v})"
920
+ )
921
+ else:
922
+ in_range_count += 1
923
+ # Check if the for loop terminated without changing any gamma
924
+ if in_range_count == len(gamma):
925
+ break
926
+
927
+ # Remove the new edge attributes
928
+ for _, _, d in G.edges(data=True):
929
+ if lambda_key in d:
930
+ del d[lambda_key]
931
+
932
+ return gamma
933
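+
+ # Editor's illustrative sketch (not part of the library): the q(e) helper
+ # above relies on the matrix-tree identity Pr[e in T] = w_e * T(G/e) / T(G),
+ # where T(.) is the weighted count of spanning trees.  The hypothetical
+ # helper below (the name _demo_spanning_tree_marginals is not in the source)
+ # checks that identity for the uniform case (all gamma equal to zero) on a
+ # small cycle, where each edge marginal is 0.75 and the marginals sum to
+ # n - 1.
+ def _demo_spanning_tree_marginals():
+     import networkx as nx
+
+     G = nx.cycle_graph(4)  # 4 spanning trees; every edge lies in 3 of them
+     total = nx.total_spanning_tree_weight(G)
+     marginals = {}
+     for e in G.edges():
+         # Contracting e and counting spanning trees gives the number of
+         # spanning trees of G that contain e.
+         G_e = nx.contracted_edge(G, e, self_loops=False)
+         marginals[e] = nx.total_spanning_tree_weight(G_e) / total
+     assert all(abs(p - 0.75) < 1e-9 for p in marginals.values())
+     assert abs(sum(marginals.values()) - (G.number_of_nodes() - 1)) < 1e-9
+     return marginals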
+
934
+
935
+ @nx._dispatchable(edge_attrs="weight")
936
+ def greedy_tsp(G, weight="weight", source=None):
937
+ """Return a low cost cycle starting at `source` and its cost.
938
+
939
+ This approximates a solution to the traveling salesman problem.
940
+ It finds a cycle through all the nodes that a salesman can follow
+ while keeping the total distance of the trip low.
+ It uses a simple greedy algorithm.
+ In essence, this function returns a cycle through all nodes, built
+ greedily from the given source point so that the total cost of the
+ cycle stays small.
945
+
946
+ Parameters
947
+ ----------
948
+ G : Graph
949
+ The Graph should be a complete weighted undirected graph.
950
+ The distance between all pairs of nodes should be included.
951
+
952
+ weight : string, optional (default="weight")
953
+ Edge data key corresponding to the edge weight.
954
+ If any edge does not have this attribute the weight is set to 1.
955
+
956
+ source : node, optional (default: first node in list(G))
957
+ Starting node. If None, defaults to ``next(iter(G))``
958
+
959
+ Returns
960
+ -------
961
+ cycle : list of nodes
962
+ Returns the cycle (list of nodes) that a salesman
963
+ can follow to minimize total weight of the trip.
964
+
965
+ Raises
966
+ ------
967
+ NetworkXError
968
+ If `G` is not complete, the algorithm raises an exception.
969
+
970
+ Examples
971
+ --------
972
+ >>> from networkx.algorithms import approximation as approx
973
+ >>> G = nx.DiGraph()
974
+ >>> G.add_weighted_edges_from(
975
+ ... {
976
+ ... ("A", "B", 3),
977
+ ... ("A", "C", 17),
978
+ ... ("A", "D", 14),
979
+ ... ("B", "A", 3),
980
+ ... ("B", "C", 12),
981
+ ... ("B", "D", 16),
982
+ ... ("C", "A", 13),
983
+ ... ("C", "B", 12),
984
+ ... ("C", "D", 4),
985
+ ... ("D", "A", 14),
986
+ ... ("D", "B", 15),
987
+ ... ("D", "C", 2),
988
+ ... }
989
+ ... )
990
+ >>> cycle = approx.greedy_tsp(G, source="D")
991
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
992
+ >>> cycle
993
+ ['D', 'C', 'B', 'A', 'D']
994
+ >>> cost
995
+ 31
996
+
997
+ Notes
998
+ -----
999
+ This implementation of a greedy algorithm is based on the following:
1000
+
1001
+ - The algorithm adds a node to the solution at every iteration.
1002
+ - The algorithm selects a node not already in the cycle whose connection
1003
+ to the previous node adds the least cost to the cycle.
1004
+
1005
+ A greedy algorithm does not always give the best solution.
1006
+ However, it can construct a first feasible solution which can
1007
+ be passed as a parameter to an iterative improvement algorithm such
1008
+ as Simulated Annealing, or Threshold Accepting.
1009
+
1010
+ Time complexity: It has a running time $O(|V|^2)$
1011
+ """
1012
+ # Check that G is a complete graph
1013
+ N = len(G) - 1
1014
+ # This check ignores selfloops which is what we want here.
1015
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
1016
+ raise nx.NetworkXError("G must be a complete graph.")
1017
+
1018
+ if source is None:
1019
+ source = nx.utils.arbitrary_element(G)
1020
+
1021
+ if G.number_of_nodes() == 2:
1022
+ neighbor = next(G.neighbors(source))
1023
+ return [source, neighbor, source]
1024
+
1025
+ nodeset = set(G)
1026
+ nodeset.remove(source)
1027
+ cycle = [source]
1028
+ next_node = source
1029
+ while nodeset:
1030
+ nbrdict = G[next_node]
1031
+ next_node = min(nodeset, key=lambda n: nbrdict[n].get(weight, 1))
1032
+ cycle.append(next_node)
1033
+ nodeset.remove(next_node)
1034
+ cycle.append(cycle[0])
1035
+ return cycle
1036
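+
+ # Editor's illustrative sketch (not part of the library): the Notes above
+ # suggest using the greedy cycle as a first feasible solution for an
+ # improvement heuristic.  The hypothetical demo below (all _demo_* names
+ # are illustrative only) seeds simulated_annealing_tsp and
+ # threshold_accepting_tsp with the greedy tour of a small random complete
+ # graph; the refined tours can never cost more than the seed.
+ def _demo_greedy_then_refine(seed=42):
+     import random
+
+     import networkx as nx
+
+     rng = random.Random(seed)
+     G = nx.complete_graph(6)
+     for u, v in G.edges():
+         G[u][v]["weight"] = rng.randint(1, 20)
+
+     greedy_cycle = greedy_tsp(G, source=0)
+     sa_cycle = simulated_annealing_tsp(G, init_cycle=greedy_cycle, source=0, seed=seed)
+     ta_cycle = threshold_accepting_tsp(G, init_cycle=greedy_cycle, source=0, seed=seed)
+
+     def tour_cost(cycle):
+         return sum(G[u][v]["weight"] for u, v in nx.utils.pairwise(cycle))
+
+     # Both refinement heuristics keep the best cycle seen, so they can
+     # never return a tour worse than the greedy seed.
+     assert tour_cost(sa_cycle) <= tour_cost(greedy_cycle)
+     assert tour_cost(ta_cycle) <= tour_cost(greedy_cycle)
+     return greedy_cycle, sa_cycle, ta_cycle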
+
1037
+
1038
+ @py_random_state(9)
1039
+ @nx._dispatchable(edge_attrs="weight")
1040
+ def simulated_annealing_tsp(
1041
+ G,
1042
+ init_cycle,
1043
+ weight="weight",
1044
+ source=None,
1045
+ temp=100,
1046
+ move="1-1",
1047
+ max_iterations=10,
1048
+ N_inner=100,
1049
+ alpha=0.01,
1050
+ seed=None,
1051
+ ):
1052
+ """Returns an approximate solution to the traveling salesman problem.
1053
+
1054
+ This function uses simulated annealing to approximate the minimal cost
1055
+ cycle through the nodes. Starting from a suboptimal solution, simulated
1056
+ annealing perturbs that solution, occasionally accepting changes that make
1057
+ the solution worse to escape from a locally optimal solution. The chance
1058
+ of accepting such changes decreases over the iterations to encourage
1059
+ an optimal result. In summary, the function returns a cycle starting
1060
+ at `source` for which the total cost is minimized. It also returns the cost.
1061
+
1062
+ The chance of accepting a proposed change is related to a parameter called
1063
+ the temperature (annealing has a physical analogue of steel hardening
1064
+ as it cools). As the temperature is reduced, the chance of moves that
1065
+ increase cost goes down.
1066
+
1067
+ Parameters
1068
+ ----------
1069
+ G : Graph
1070
+ `G` should be a complete weighted graph.
1071
+ The distance between all pairs of nodes should be included.
1072
+
1073
+ init_cycle : list of all nodes or "greedy"
1074
+ The initial solution (a cycle through all nodes returning to the start).
1075
+ This argument has no default to make you think about it.
1076
+ If "greedy", use `greedy_tsp(G, weight)`.
1077
+ Other common starting cycles are `list(G) + [next(iter(G))]` or the final
1078
+ result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
1079
+
1080
+ weight : string, optional (default="weight")
1081
+ Edge data key corresponding to the edge weight.
1082
+ If any edge does not have this attribute the weight is set to 1.
1083
+
1084
+ source : node, optional (default: first node in list(G))
1085
+ Starting node. If None, defaults to ``next(iter(G))``
1086
+
1087
+ temp : int, optional (default=100)
1088
+ The algorithm's temperature parameter. It represents the initial
1089
+ value of temperature
1090
+
1091
+ move : "1-1" or "1-0" or function, optional (default="1-1")
1092
+ Indicator of what move to use when finding new trial solutions.
1093
+ Strings indicate two special built-in moves:
1094
+
1095
+ - "1-1": 1-1 exchange which transposes the position
1096
+ of two elements of the current solution.
1097
+ The function called is :func:`swap_two_nodes`.
1098
+ For example if we apply 1-1 exchange in the solution
1099
+ ``A = [3, 2, 1, 4, 3]``
1100
+ we can get the following by the transposition of 1 and 4 elements:
1101
+ ``A' = [3, 2, 4, 1, 3]``
1102
+ - "1-0": 1-0 exchange which moves an node in the solution
1103
+ to a new position.
1104
+ The function called is :func:`move_one_node`.
1105
+ For example if we apply 1-0 exchange in the solution
1106
+ ``A = [3, 2, 1, 4, 3]``
1107
+ we can transfer the fourth element to the second position:
1108
+ ``A' = [3, 4, 2, 1, 3]``
1109
+
1110
+ You may provide your own functions to enact a move from
1111
+ one solution to a neighbor solution. The function must take
1112
+ the solution as input along with a `seed` input to control
1113
+ random number generation (see the `seed` input here).
1114
+ Your function should maintain the solution as a cycle with
1115
+ equal first and last node and all others appearing once.
1116
+ Your function should return the new solution.
1117
+
1118
+ max_iterations : int, optional (default=10)
1119
+ Declared done when this number of consecutive iterations of
1120
+ the outer loop occurs without any change in the best cost solution.
1121
+
1122
+ N_inner : int, optional (default=100)
1123
+ The number of iterations of the inner loop.
1124
+
1125
+ alpha : float between (0, 1), optional (default=0.01)
1126
+ Percentage of temperature decrease in each iteration
1127
+ of outer loop
1128
+
1129
+ seed : integer, random_state, or None (default)
1130
+ Indicator of random number generation state.
1131
+ See :ref:`Randomness<randomness>`.
1132
+
1133
+ Returns
1134
+ -------
1135
+ cycle : list of nodes
1136
+ Returns the cycle (list of nodes) that a salesman
1137
+ can follow to minimize total weight of the trip.
1138
+
1139
+ Raises
1140
+ ------
1141
+ NetworkXError
1142
+ If `G` is not complete the algorithm raises an exception.
1143
+
1144
+ Examples
1145
+ --------
1146
+ >>> from networkx.algorithms import approximation as approx
1147
+ >>> G = nx.DiGraph()
1148
+ >>> G.add_weighted_edges_from(
1149
+ ... {
1150
+ ... ("A", "B", 3),
1151
+ ... ("A", "C", 17),
1152
+ ... ("A", "D", 14),
1153
+ ... ("B", "A", 3),
1154
+ ... ("B", "C", 12),
1155
+ ... ("B", "D", 16),
1156
+ ... ("C", "A", 13),
1157
+ ... ("C", "B", 12),
1158
+ ... ("C", "D", 4),
1159
+ ... ("D", "A", 14),
1160
+ ... ("D", "B", 15),
1161
+ ... ("D", "C", 2),
1162
+ ... }
1163
+ ... )
1164
+ >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
1165
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1166
+ >>> cycle
1167
+ ['D', 'C', 'B', 'A', 'D']
1168
+ >>> cost
1169
+ 31
1170
+ >>> incycle = ["D", "B", "A", "C", "D"]
1171
+ >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
1172
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1173
+ >>> cycle
1174
+ ['D', 'C', 'B', 'A', 'D']
1175
+ >>> cost
1176
+ 31
1177
+
1178
+ Notes
1179
+ -----
1180
+ Simulated Annealing is a metaheuristic local search algorithm.
1181
+ The main characteristic of this algorithm is that it accepts
1182
+ even solutions which lead to the increase of the cost in order
1183
+ to escape from low quality local optimal solutions.
1184
+
1185
+ This algorithm needs an initial solution. If not provided, it is
+ constructed by a simple greedy algorithm. At every iteration, the
+ algorithm selects a neighbor of the current solution.
+ Let $c(x)$ be the cost of the current solution and $c(x')$ the cost of
+ a neighbor solution.
+ If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
+ solution for the next iteration. Otherwise, the neighbor solution is
+ accepted with probability $p = \exp(-[c(x') - c(x)] / temp)$; if it is
+ not accepted, the current solution is retained.
1194
+
1195
+ `temp` is a parameter of the algorithm and represents temperature.
1196
+
1197
+ Time complexity:
1198
+ For $N_i$ iterations of the inner loop and $N_o$ iterations of the
1199
+ outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.
1200
+
1201
+ For more information on this algorithm and its inspiration see:
1202
+ http://en.wikipedia.org/wiki/Simulated_annealing
1203
+ """
1204
+ if move == "1-1":
1205
+ move = swap_two_nodes
1206
+ elif move == "1-0":
1207
+ move = move_one_node
1208
+ if init_cycle == "greedy":
1209
+ # Construct an initial solution using a greedy algorithm.
1210
+ cycle = greedy_tsp(G, weight=weight, source=source)
1211
+ if G.number_of_nodes() == 2:
1212
+ return cycle
1213
+
1214
+ else:
1215
+ cycle = list(init_cycle)
1216
+ if source is None:
1217
+ source = cycle[0]
1218
+ elif source != cycle[0]:
1219
+ raise nx.NetworkXError("source must be first node in init_cycle")
1220
+ if cycle[0] != cycle[-1]:
1221
+ raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
1222
+
1223
+ if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
1224
+ raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
1225
+
1226
+ # Check that G is a complete graph
1227
+ N = len(G) - 1
1228
+ # This check ignores selfloops which is what we want here.
1229
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
1230
+ raise nx.NetworkXError("G must be a complete graph.")
1231
+
1232
+ if G.number_of_nodes() == 2:
1233
+ neighbor = next(G.neighbors(source))
1234
+ return [source, neighbor, source]
1235
+
1236
+ # Find the cost of initial solution
1237
+ cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
1238
+
1239
+ count = 0
1240
+ best_cycle = cycle.copy()
1241
+ best_cost = cost
1242
+ while count <= max_iterations and temp > 0:
1243
+ count += 1
1244
+ for i in range(N_inner):
1245
+ adj_sol = move(cycle, seed)
1246
+ adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
1247
+ delta = adj_cost - cost
1248
+ if delta <= 0:
1249
+ # Set current solution the adjacent solution.
1250
+ cycle = adj_sol
1251
+ cost = adj_cost
1252
+
1253
+ if cost < best_cost:
1254
+ count = 0
1255
+ best_cycle = cycle.copy()
1256
+ best_cost = cost
1257
+ else:
1258
+ # Accept even a worse solution with probability p.
1259
+ p = math.exp(-delta / temp)
1260
+ if p >= seed.random():
1261
+ cycle = adj_sol
1262
+ cost = adj_cost
1263
+ temp -= temp * alpha
1264
+
1265
+ return best_cycle
1266
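+
+ # Editor's illustrative sketch (not part of the library): the `move`
+ # parameter documented above also accepts a user-supplied function taking
+ # ``(soln, seed)`` and returning a new cycle whose first and last nodes are
+ # equal and whose other nodes each appear exactly once.  The hypothetical
+ # move below (the name is illustrative only, and it assumes ``seed`` behaves
+ # like ``random.Random``) reverses a random interior segment, which
+ # preserves that invariant, e.g.
+ # ``simulated_annealing_tsp(G, "greedy", move=_demo_reverse_segment_move)``.
+ def _demo_reverse_segment_move(soln, seed):
+     # Pick two interior positions and reverse the slice between them;
+     # positions 0 and -1 (the repeated start node) are never touched.
+     i = seed.randint(1, len(soln) - 3)
+     j = seed.randint(i + 1, len(soln) - 2)
+     new_soln = list(soln)
+     new_soln[i : j + 1] = reversed(new_soln[i : j + 1])
+     return new_soln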
+
1267
+
1268
+ @py_random_state(9)
1269
+ @nx._dispatchable(edge_attrs="weight")
1270
+ def threshold_accepting_tsp(
1271
+ G,
1272
+ init_cycle,
1273
+ weight="weight",
1274
+ source=None,
1275
+ threshold=1,
1276
+ move="1-1",
1277
+ max_iterations=10,
1278
+ N_inner=100,
1279
+ alpha=0.1,
1280
+ seed=None,
1281
+ ):
1282
+ """Returns an approximate solution to the traveling salesman problem.
1283
+
1284
+ This function uses threshold accepting methods to approximate the minimal cost
1285
+ cycle through the nodes. Starting from a suboptimal solution, threshold
1286
+ accepting methods perturb that solution, accepting any changes that make
1287
+ the solution no worse than increasing by a threshold amount. Improvements
1288
+ in cost are accepted, but so are changes leading to small increases in cost.
1289
+ This allows the solution to leave suboptimal local minima in solution space.
1290
+ The threshold is decreased slowly as iterations proceed helping to ensure
1291
+ an optimum. In summary, the function returns a cycle starting at `source`
1292
+ for which the total cost is minimized.
1293
+
1294
+ Parameters
1295
+ ----------
1296
+ G : Graph
1297
+ `G` should be a complete weighted graph.
1298
+ The distance between all pairs of nodes should be included.
1299
+
1300
+ init_cycle : list or "greedy"
1301
+ The initial solution (a cycle through all nodes returning to the start).
1302
+ This argument has no default to make you think about it.
1303
+ If "greedy", use `greedy_tsp(G, weight)`.
1304
+ Other common starting cycles are `list(G) + [next(iter(G))]` or the final
1305
+ result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
1306
+
1307
+ weight : string, optional (default="weight")
1308
+ Edge data key corresponding to the edge weight.
1309
+ If any edge does not have this attribute the weight is set to 1.
1310
+
1311
+ source : node, optional (default: first node in list(G))
1312
+ Starting node. If None, defaults to ``next(iter(G))``
1313
+
1314
+ threshold : int, optional (default=1)
1315
+ The algorithm's threshold parameter. It represents the initial
1316
+ threshold's value
1317
+
1318
+ move : "1-1" or "1-0" or function, optional (default="1-1")
1319
+ Indicator of what move to use when finding new trial solutions.
1320
+ Strings indicate two special built-in moves:
1321
+
1322
+ - "1-1": 1-1 exchange which transposes the position
1323
+ of two elements of the current solution.
1324
+ The function called is :func:`swap_two_nodes`.
1325
+ For example if we apply 1-1 exchange in the solution
1326
+ ``A = [3, 2, 1, 4, 3]``
1327
+ we can get the following by the transposition of 1 and 4 elements:
1328
+ ``A' = [3, 2, 4, 1, 3]``
1329
+ - "1-0": 1-0 exchange which moves an node in the solution
1330
+ to a new position.
1331
+ The function called is :func:`move_one_node`.
1332
+ For example if we apply 1-0 exchange in the solution
1333
+ ``A = [3, 2, 1, 4, 3]``
1334
+ we can transfer the fourth element to the second position:
1335
+ ``A' = [3, 4, 2, 1, 3]``
1336
+
1337
+ You may provide your own functions to enact a move from
1338
+ one solution to a neighbor solution. The function must take
1339
+ the solution as input along with a `seed` input to control
1340
+ random number generation (see the `seed` input here).
1341
+ Your function should maintain the solution as a cycle with
1342
+ equal first and last node and all others appearing once.
1343
+ Your function should return the new solution.
1344
+
1345
+ max_iterations : int, optional (default=10)
1346
+ Declared done when this number of consecutive iterations of
1347
+ the outer loop occurs without any change in the best cost solution.
1348
+
1349
+ N_inner : int, optional (default=100)
1350
+ The number of iterations of the inner loop.
1351
+
1352
+ alpha : float between (0, 1), optional (default=0.1)
1353
+ Percentage of threshold decrease when there is at
1354
+ least one acceptance of a neighbor solution.
1355
+ If no inner loop moves are accepted the threshold remains unchanged.
1356
+
1357
+ seed : integer, random_state, or None (default)
1358
+ Indicator of random number generation state.
1359
+ See :ref:`Randomness<randomness>`.
1360
+
1361
+ Returns
1362
+ -------
1363
+ cycle : list of nodes
1364
+ Returns the cycle (list of nodes) that a salesman
1365
+ can follow to minimize total weight of the trip.
1366
+
1367
+ Raises
1368
+ ------
1369
+ NetworkXError
1370
+ If `G` is not complete the algorithm raises an exception.
1371
+
1372
+ Examples
1373
+ --------
1374
+ >>> from networkx.algorithms import approximation as approx
1375
+ >>> G = nx.DiGraph()
1376
+ >>> G.add_weighted_edges_from(
1377
+ ... {
1378
+ ... ("A", "B", 3),
1379
+ ... ("A", "C", 17),
1380
+ ... ("A", "D", 14),
1381
+ ... ("B", "A", 3),
1382
+ ... ("B", "C", 12),
1383
+ ... ("B", "D", 16),
1384
+ ... ("C", "A", 13),
1385
+ ... ("C", "B", 12),
1386
+ ... ("C", "D", 4),
1387
+ ... ("D", "A", 14),
1388
+ ... ("D", "B", 15),
1389
+ ... ("D", "C", 2),
1390
+ ... }
1391
+ ... )
1392
+ >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D")
1393
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1394
+ >>> cycle
1395
+ ['D', 'C', 'B', 'A', 'D']
1396
+ >>> cost
1397
+ 31
1398
+ >>> incycle = ["D", "B", "A", "C", "D"]
1399
+ >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D")
1400
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1401
+ >>> cycle
1402
+ ['D', 'C', 'B', 'A', 'D']
1403
+ >>> cost
1404
+ 31
1405
+
1406
+ Notes
1407
+ -----
1408
+ Threshold Accepting is a metaheuristic local search algorithm.
1409
+ The main characteristic of this algorithm is that it accepts
1410
+ even solutions which lead to the increase of the cost in order
1411
+ to escape from low quality local optimal solutions.
1412
+
1413
+ This algorithm needs an initial solution. This solution can be
+ constructed by a simple greedy algorithm. At every iteration, it
+ selects a neighbor of the current solution.
+ Let $c(x)$ be the cost of the current solution and $c(x')$ the cost of
+ a neighbor solution.
+ If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the
+ current solution for the next iteration, where `threshold` is the
+ current value of the acceptance threshold.
1420
+
1421
+ In comparison to the Simulated Annealing algorithm, the Threshold
1422
+ Accepting algorithm does not accept very low quality solutions
1423
+ (due to the presence of the threshold value). In the case of
1424
+ Simulated Annealing, even a very low quality solution can
1425
+ be accepted with probability $p$.
1426
+
1427
+ Time complexity:
1428
+ It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number
1429
+ of times the outer and inner loop run respectively.
1430
+
1431
+ For more information on this algorithm and its inspiration see:
1432
+ https://doi.org/10.1016/0021-9991(90)90201-B
1433
+
1434
+ See Also
1435
+ --------
1436
+ simulated_annealing_tsp
1437
+
1438
+ """
1439
+ if move == "1-1":
1440
+ move = swap_two_nodes
1441
+ elif move == "1-0":
1442
+ move = move_one_node
1443
+ if init_cycle == "greedy":
1444
+ # Construct an initial solution using a greedy algorithm.
1445
+ cycle = greedy_tsp(G, weight=weight, source=source)
1446
+ if G.number_of_nodes() == 2:
1447
+ return cycle
1448
+
1449
+ else:
1450
+ cycle = list(init_cycle)
1451
+ if source is None:
1452
+ source = cycle[0]
1453
+ elif source != cycle[0]:
1454
+ raise nx.NetworkXError("source must be first node in init_cycle")
1455
+ if cycle[0] != cycle[-1]:
1456
+ raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
1457
+
1458
+ if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
1459
+ raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
1460
+
1461
+ # Check that G is a complete graph
1462
+ N = len(G) - 1
1463
+ # This check ignores selfloops which is what we want here.
1464
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
1465
+ raise nx.NetworkXError("G must be a complete graph.")
1466
+
1467
+ if G.number_of_nodes() == 2:
1468
+ neighbor = next(G.neighbors(source))
1469
+ return [source, neighbor, source]
1470
+
1471
+ # Find the cost of initial solution
1472
+ cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
1473
+
1474
+ count = 0
1475
+ best_cycle = cycle.copy()
1476
+ best_cost = cost
1477
+ while count <= max_iterations:
1478
+ count += 1
1479
+ accepted = False
1480
+ for i in range(N_inner):
1481
+ adj_sol = move(cycle, seed)
1482
+ adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
1483
+ delta = adj_cost - cost
1484
+ if delta <= threshold:
1485
+ accepted = True
1486
+
1487
+ # Set current solution the adjacent solution.
1488
+ cycle = adj_sol
1489
+ cost = adj_cost
1490
+
1491
+ if cost < best_cost:
1492
+ count = 0
1493
+ best_cycle = cycle.copy()
1494
+ best_cost = cost
1495
+ if accepted:
1496
+ threshold -= threshold * alpha
1497
+
1498
+ return best_cycle
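+
+ # Editor's illustrative sketch (not part of the library): the Notes of the
+ # two solvers above differ only in their acceptance rule.  The hypothetical
+ # predicates below (names are illustrative only) spell out both rules for a
+ # single candidate move with cost change ``delta``.
+ def _demo_accept_simulated_annealing(delta, temp, rng):
+     import math
+
+     # Accept improvements outright; accept a worsening move with
+     # probability exp(-delta / temp).
+     return delta <= 0 or rng.random() <= math.exp(-delta / temp)
+
+
+ def _demo_accept_threshold_accepting(delta, threshold):
+     # Accept any move whose cost increase does not exceed the threshold.
+     return delta <= threshold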
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py ADDED
@@ -0,0 +1,252 @@
1
+ """Functions for computing treewidth decomposition.
2
+
3
+ Treewidth of an undirected graph is a number associated with the graph.
4
+ It can be defined as the size of the largest vertex set (bag) in a tree
5
+ decomposition of the graph minus one.
6
+
7
+ `Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
8
+
9
+ The notions of treewidth and tree decomposition have gained their
10
+ attractiveness partly because many graph and network problems that are
11
+ intractable (e.g., NP-hard) on arbitrary graphs become efficiently
12
+ solvable (e.g., with a linear time algorithm) when the treewidth of the
13
+ input graphs is bounded by a constant [1]_ [2]_.
14
+
15
+ There are two different functions for computing a tree decomposition:
16
+ :func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
17
+
18
+ .. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
19
+ computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275.
20
+ http://dx.doi.org/10.1016/j.ic.2009.03.008
21
+
22
+ .. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
23
+ and Computing Sciences, Utrecht University.
24
+ Technical Report UU-CS-2005-018.
25
+ http://www.cs.uu.nl
26
+
27
+ .. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
28
+ https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
29
+
30
+ """
31
+
32
+ import itertools
33
+ import sys
34
+ from heapq import heapify, heappop, heappush
35
+
36
+ import networkx as nx
37
+ from networkx.utils import not_implemented_for
38
+
39
+ __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
40
+
41
+
42
+ @not_implemented_for("directed")
43
+ @not_implemented_for("multigraph")
44
+ @nx._dispatchable(returns_graph=True)
45
+ def treewidth_min_degree(G):
46
+ """Returns a treewidth decomposition using the Minimum Degree heuristic.
47
+
48
+ The heuristic chooses the nodes according to their degree, i.e., first
49
+ the node with the lowest degree is chosen, then the graph is updated
50
+ and the corresponding node is removed. Next, a new node with the lowest
51
+ degree is chosen, and so on.
52
+
53
+ Parameters
54
+ ----------
55
+ G : NetworkX graph
56
+
57
+ Returns
58
+ -------
59
+ Treewidth decomposition : (int, Graph) tuple
60
+ 2-tuple with treewidth and the corresponding decomposed tree.
61
+ """
62
+ deg_heuristic = MinDegreeHeuristic(G)
63
+ return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
64
+
65
+
66
+ @not_implemented_for("directed")
67
+ @not_implemented_for("multigraph")
68
+ @nx._dispatchable(returns_graph=True)
69
+ def treewidth_min_fill_in(G):
70
+ """Returns a treewidth decomposition using the Minimum Fill-in heuristic.
71
+
72
+ The heuristic chooses a node from the graph, where the number of edges
73
+ added turning the neighborhood of the chosen node into clique is as
74
+ small as possible.
75
+
76
+ Parameters
77
+ ----------
78
+ G : NetworkX graph
79
+
80
+ Returns
81
+ -------
82
+ Treewidth decomposition : (int, Graph) tuple
83
+ 2-tuple with treewidth and the corresponding decomposed tree.
84
+ """
85
+ return treewidth_decomp(G, min_fill_in_heuristic)
86
+
87
+
88
+ class MinDegreeHeuristic:
89
+ """Implements the Minimum Degree heuristic.
90
+
91
+ The heuristic chooses the nodes according to their degree
92
+ (number of neighbors), i.e., first the node with the lowest degree is
93
+ chosen, then the graph is updated and the corresponding node is
94
+ removed. Next, a new node with the lowest degree is chosen, and so on.
95
+ """
96
+
97
+ def __init__(self, graph):
98
+ self._graph = graph
99
+
100
+ # nodes that have to be updated in the heap before each iteration
101
+ self._update_nodes = []
102
+
103
+ self._degreeq = [] # a heapq with 3-tuples (degree,unique_id,node)
104
+ self.count = itertools.count()
105
+
106
+ # build heap with initial degrees
107
+ for n in graph:
108
+ self._degreeq.append((len(graph[n]), next(self.count), n))
109
+ heapify(self._degreeq)
110
+
111
+ def best_node(self, graph):
112
+ # update nodes in self._update_nodes
113
+ for n in self._update_nodes:
114
+ # insert changed degrees into degreeq
115
+ heappush(self._degreeq, (len(graph[n]), next(self.count), n))
116
+
117
+ # get the next valid (minimum degree) node
118
+ while self._degreeq:
119
+ (min_degree, _, elim_node) = heappop(self._degreeq)
120
+ if elim_node not in graph or len(graph[elim_node]) != min_degree:
121
+ # outdated entry in degreeq
122
+ continue
123
+ elif min_degree == len(graph) - 1:
124
+ # fully connected: abort condition
125
+ return None
126
+
127
+ # remember to update nodes in the heap before getting the next node
128
+ self._update_nodes = graph[elim_node]
129
+ return elim_node
130
+
131
+ # the heap is empty: abort
132
+ return None
133
+
134
+
135
+ def min_fill_in_heuristic(graph):
136
+ """Implements the Minimum Degree heuristic.
137
+
138
+ Returns the node from the graph, where the number of edges added when
139
+ turning the neighborhood of the chosen node into clique is as small as
140
+ possible. This algorithm chooses the nodes using the Minimum Fill-In
141
+ heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
142
+ additional constant memory."""
143
+
144
+ if len(graph) == 0:
145
+ return None
146
+
147
+ min_fill_in_node = None
148
+
149
+ min_fill_in = sys.maxsize
150
+
151
+ # sort nodes by degree
152
+ nodes_by_degree = sorted(graph, key=lambda x: len(graph[x]))
153
+ min_degree = len(graph[nodes_by_degree[0]])
154
+
155
+ # abort condition (handle complete graph)
156
+ if min_degree == len(graph) - 1:
157
+ return None
158
+
159
+ for node in nodes_by_degree:
160
+ num_fill_in = 0
161
+ nbrs = graph[node]
162
+ for nbr in nbrs:
163
+ # count how many nodes in nbrs current nbr is not connected to
164
+ # subtract 1 for the node itself
165
+ num_fill_in += len(nbrs - graph[nbr]) - 1
166
+ if num_fill_in >= 2 * min_fill_in:
167
+ break
168
+
169
+ num_fill_in /= 2 # divide by 2 because of double counting
170
+
171
+ if num_fill_in < min_fill_in: # update min-fill-in node
172
+ if num_fill_in == 0:
173
+ return node
174
+ min_fill_in = num_fill_in
175
+ min_fill_in_node = node
176
+
177
+ return min_fill_in_node
178
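+
+ # Editor's illustrative sketch (not part of the library): the heuristic
+ # above operates on the dict-of-sets adjacency built by ``treewidth_decomp``.
+ # In a 4-cycle every node has exactly two non-adjacent neighbors, so
+ # eliminating any node requires exactly one fill-in edge; the hypothetical
+ # demo below (name is illustrative only) checks that count for the node the
+ # heuristic picks.
+ def _demo_min_fill_in_on_cycle():
+     import networkx as nx
+
+     G = nx.cycle_graph(4)
+     # Same structure as the one used internally by treewidth_decomp.
+     graph = {n: set(G[n]) - {n} for n in G}
+     node = min_fill_in_heuristic(graph)
+     assert node in graph
+     nbrs = graph[node]
+     # Fill-in of `node`: pairs of its neighbors that are not yet adjacent.
+     fill_in = sum(1 for u in nbrs for v in nbrs if u < v and v not in graph[u])
+     assert fill_in == 1
+     return node, fill_in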
+
179
+
180
+ @nx._dispatchable(returns_graph=True)
181
+ def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
182
+ """Returns a treewidth decomposition using the passed heuristic.
183
+
184
+ Parameters
185
+ ----------
186
+ G : NetworkX graph
187
+ heuristic : heuristic function
188
+
189
+ Returns
190
+ -------
191
+ Treewidth decomposition : (int, Graph) tuple
192
+ 2-tuple with treewidth and the corresponding decomposed tree.
193
+ """
194
+
195
+ # make dict-of-sets structure
196
+ graph = {n: set(G[n]) - {n} for n in G}
197
+
198
+ # stack containing nodes and neighbors in the order from the heuristic
199
+ node_stack = []
200
+
201
+ # get first node from heuristic
202
+ elim_node = heuristic(graph)
203
+ while elim_node is not None:
204
+ # connect all neighbors with each other
205
+ nbrs = graph[elim_node]
206
+ for u, v in itertools.permutations(nbrs, 2):
207
+ if v not in graph[u]:
208
+ graph[u].add(v)
209
+
210
+ # push node and its current neighbors on stack
211
+ node_stack.append((elim_node, nbrs))
212
+
213
+ # remove node from graph
214
+ for u in graph[elim_node]:
215
+ graph[u].remove(elim_node)
216
+
217
+ del graph[elim_node]
218
+ elim_node = heuristic(graph)
219
+
220
+ # the abort condition is met; put all remaining nodes into one bag
221
+ decomp = nx.Graph()
222
+ first_bag = frozenset(graph.keys())
223
+ decomp.add_node(first_bag)
224
+
225
+ treewidth = len(first_bag) - 1
226
+
227
+ while node_stack:
228
+ # get node and its neighbors from the stack
229
+ (curr_node, nbrs) = node_stack.pop()
230
+
231
+ # find a bag all neighbors are in
232
+ old_bag = None
233
+ for bag in decomp.nodes:
234
+ if nbrs <= bag:
235
+ old_bag = bag
236
+ break
237
+
238
+ if old_bag is None:
239
+ # no old_bag was found: just connect to the first_bag
240
+ old_bag = first_bag
241
+
242
+ # create new node for decomposition
243
+ nbrs.add(curr_node)
244
+ new_bag = frozenset(nbrs)
245
+
246
+ # update treewidth
247
+ treewidth = max(treewidth, len(new_bag) - 1)
248
+
249
+ # add edge to decomposition (implicitly also adds the new node)
250
+ decomp.add_edge(old_bag, new_bag)
251
+
252
+ return treewidth, decomp
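+
+ # Editor's illustrative sketch (not part of the library): a hypothetical
+ # demo (name is illustrative only) of the two public heuristics.  A cycle
+ # graph has treewidth 2, every bag of the returned decomposition is a
+ # frozenset of nodes of G, and the decomposition itself is a tree.
+ def _demo_treewidth_of_cycle():
+     import networkx as nx
+
+     G = nx.cycle_graph(6)
+     width_deg, decomp_deg = treewidth_min_degree(G)
+     width_fill, decomp_fill = treewidth_min_fill_in(G)
+     assert width_deg == width_fill == 2
+     assert all(bag <= set(G) for bag in decomp_deg)
+     assert nx.is_tree(decomp_deg) and nx.is_tree(decomp_fill)
+     return width_deg, decomp_deg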
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py ADDED
@@ -0,0 +1,82 @@
1
+ """Functions for computing an approximate minimum weight vertex cover.
2
+
3
+ A |vertex cover|_ is a subset of nodes such that each edge in the graph
4
+ is incident to at least one node in the subset.
5
+
6
+ .. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
7
+ .. |vertex cover| replace:: *vertex cover*
8
+
9
+ """
10
+ import networkx as nx
11
+
12
+ __all__ = ["min_weighted_vertex_cover"]
13
+
14
+
15
+ @nx._dispatchable(node_attrs="weight")
16
+ def min_weighted_vertex_cover(G, weight=None):
17
+ r"""Returns an approximate minimum weighted vertex cover.
18
+
19
+ The set of nodes returned by this function is guaranteed to be a
20
+ vertex cover, and the total weight of the set is guaranteed to be at
21
+ most twice the total weight of the minimum weight vertex cover. In
22
+ other words,
23
+
24
+ .. math::
25
+
26
+ w(S) \leq 2 * w(S^*),
27
+
28
+ where $S$ is the vertex cover returned by this function,
29
+ $S^*$ is the vertex cover of minimum weight out of all vertex
30
+ covers of the graph, and $w$ is the function that computes the
31
+ sum of the weights of each node in that given set.
32
+
33
+ Parameters
34
+ ----------
35
+ G : NetworkX graph
36
+
37
+ weight : string, optional (default = None)
38
+ If None, every node has weight 1. If a string, use this node
39
+ attribute as the node weight. A node without this attribute is
40
+ assumed to have weight 1.
41
+
42
+ Returns
43
+ -------
44
+ min_weighted_cover : set
45
+ Returns a set of nodes whose weight sum is no more than twice
46
+ the weight sum of the minimum weight vertex cover.
47
+
48
+ Notes
49
+ -----
50
+ For a directed graph, a vertex cover has the same definition: a set
51
+ of nodes such that each edge in the graph is incident to at least
52
+ one node in the set. Whether the node is the head or tail of the
53
+ directed edge is ignored.
54
+
55
+ This is the local-ratio algorithm for computing an approximate
56
+ vertex cover. The algorithm greedily reduces the costs over edges,
57
+ iteratively building a cover. The worst-case runtime of this
58
+ implementation is $O(m \log n)$, where $n$ is the number
59
+ of nodes and $m$ the number of edges in the graph.
60
+
61
+ References
62
+ ----------
63
+ .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
64
+ approximating the weighted vertex cover problem."
65
+ *Annals of Discrete Mathematics*, 25, 27–46
66
+ <http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>
67
+
68
+ """
69
+ cost = dict(G.nodes(data=weight, default=1))
70
+ # While there are uncovered edges, choose an uncovered edge and update
71
+ # the cost of the remaining edges.
72
+ cover = set()
73
+ for u, v in G.edges():
74
+ if u in cover or v in cover:
75
+ continue
76
+ if cost[u] <= cost[v]:
77
+ cover.add(u)
78
+ cost[v] -= cost[u]
79
+ else:
80
+ cover.add(v)
81
+ cost[u] -= cost[v]
82
+ return cover
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc ADDED
Binary file (8.48 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc ADDED
Binary file (9.15 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc ADDED
Binary file (7.5 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc ADDED
Binary file (2.28 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc ADDED
Binary file (18.9 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc ADDED
Binary file (1.93 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (200 Bytes).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-310.pyc ADDED
Binary file (6.07 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc ADDED
Binary file (5.34 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-310.pyc ADDED
Binary file (3.35 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc ADDED
Binary file (1.8 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc ADDED
Binary file (7.35 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc ADDED
Binary file (5.24 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc ADDED
Binary file (9.4 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc ADDED
Binary file (12.4 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc ADDED
Binary file (4.7 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc ADDED
Binary file (11.7 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc ADDED
Binary file (1.42 kB).