applied-ai-018 committed on
Commit
8afcaf1
·
verified ·
1 Parent(s): e4b02bf

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/louvain.py +382 -0
  2. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  3. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-310.pyc +0 -0
  14. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__init__.py +6 -0
  15. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-310.pyc +0 -0
  16. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-310.pyc +0 -0
  17. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-310.pyc +0 -0
  18. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-310.pyc +0 -0
  19. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-310.pyc +0 -0
  20. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-310.pyc +0 -0
  21. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-310.pyc +0 -0
  22. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/attracting.py +114 -0
  23. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/biconnected.py +393 -0
  24. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/connected.py +214 -0
  25. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/semiconnected.py +70 -0
  26. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/strongly_connected.py +430 -0
  27. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__init__.py +0 -0
  28. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-310.pyc +0 -0
  34. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_attracting.py +70 -0
  36. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_biconnected.py +248 -0
  37. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_connected.py +117 -0
  38. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_semiconnected.py +55 -0
  39. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py +203 -0
  40. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py +96 -0
  41. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/weakly_connected.py +193 -0
  42. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 +3 -0
  43. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py +7 -0
  44. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py +1163 -0
  45. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py +248 -0
  46. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py +1065 -0
  47. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py +351 -0
  48. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py +304 -0
  49. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py +0 -0
  50. env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/louvain.py ADDED
@@ -0,0 +1,382 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Function for detecting communities based on Louvain Community Detection
2
+ Algorithm"""
3
+
4
+ import itertools
5
+ from collections import defaultdict, deque
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.community import modularity
9
+ from networkx.utils import py_random_state
10
+
11
+ __all__ = ["louvain_communities", "louvain_partitions"]
12
+
13
+
14
@py_random_state("seed")
@nx._dispatchable(edge_attrs="weight")
def louvain_communities(
    G, weight="weight", resolution=1, threshold=0.0000001, max_level=None, seed=None
):
    r"""Find the best partition of a graph using the Louvain Community Detection
    Algorithm.

    The Louvain method is a greedy, modularity-based heuristic [1]_.  It
    repeatedly (1) moves individual nodes between communities while the move
    yields a positive modularity gain, then (2) contracts each community into a
    single node and repeats, until the modularity gain between two consecutive
    levels falls below `threshold` (or `max_level` levels have been computed).

    For an isolated node $i$ moved into community $C$, the undirected gain is

    .. math::
        \Delta Q = \frac{k_{i,in}}{2m} - \gamma\frac{ \Sigma_{tot} \cdot k_i}{2m^2}

    and for directed graphs (following [3]_)

    .. math::
        \Delta Q = \frac{k_{i,in}}{m}
        - \gamma\frac{k_i^{out} \cdot\Sigma_{tot}^{in} + k_i^{in} \cdot \Sigma_{tot}^{out}}{m^2}

    where $m$ is the size of the graph, $k_{i,in}$ is the weight of links from
    $i$ into $C$, $k_i$ (resp. $k_i^{in}$, $k_i^{out}$) the (in/out) weighted
    degree of $i$, $\Sigma_{tot}$ (resp. in/out variants) the total (in/out)
    weight incident to $C$, and $\gamma$ the resolution parameter.

    Be careful with self-loops in the input graph: they are treated as
    previously reduced communities (as if the process had been started in the
    middle of the algorithm).  If self-loop weights do not represent already
    reduced communities, consider removing the self-loops first.

    Parameters
    ----------
    G : NetworkX graph
    weight : string or None, optional (default="weight")
        The name of an edge attribute that holds the numerical value
        used as a weight. If None then each edge has weight 1.
    resolution : float, optional (default=1)
        If resolution is less than 1, the algorithm favors larger communities.
        Greater than 1 favors smaller communities.
    threshold : float, optional (default=0.0000001)
        Modularity gain threshold for each level. If the gain of modularity
        between 2 levels of the algorithm is less than the given threshold
        then the algorithm stops and returns the resulting communities.
    max_level : int or None, optional (default=None)
        The maximum number of levels (steps of the algorithm) to compute.
        Must be a positive integer or None. If None, there is no maximum
        level and the threshold parameter determines the stopping condition.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    list
        A list of sets (partition of `G`). Each set represents one community
        and contains all the nodes that constitute it.

    Raises
    ------
    ValueError
        If `max_level` is not None and is not a positive integer.

    Examples
    --------
    >>> import networkx as nx
    >>> G = nx.petersen_graph()
    >>> nx.community.louvain_communities(G, seed=123)
    [{0, 4, 5, 7, 9}, {1, 2, 3, 6, 8}]

    Notes
    -----
    The order in which the nodes are considered can affect the final output;
    the ordering is determined by a random shuffle driven by `seed`.

    References
    ----------
    .. [1] Blondel, V.D. et al. Fast unfolding of communities in
       large networks. J. Stat. Mech 10008, 1-12(2008). https://doi.org/10.1088/1742-5468/2008/10/P10008
    .. [2] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
       well-connected communities. Sci Rep 9, 5233 (2019). https://doi.org/10.1038/s41598-019-41695-z
    .. [3] Nicolas Dugué, Anthony Perez. Directed Louvain : maximizing modularity in directed networks.
       [Research Report] Université d’Orléans. 2015. hal-01231784. https://hal.archives-ouvertes.fr/hal-01231784

    See Also
    --------
    louvain_partitions
    """
    level_iter = louvain_partitions(G, weight, resolution, threshold, seed)
    if max_level is not None:
        if max_level <= 0:
            raise ValueError("max_level argument must be a positive integer or None")
        # Truncate the dendrogram lazily; later levels are never computed.
        level_iter = itertools.islice(level_iter, max_level)
    # Drain the generator keeping only the last (deepest) partition.
    last = deque(level_iter, maxlen=1)
    return last.pop()
130
+
131
+
132
@py_random_state("seed")
@nx._dispatchable(edge_attrs="weight")
def louvain_partitions(
    G, weight="weight", resolution=1, threshold=0.0000001, seed=None
):
    """Yields partitions for each level of the Louvain Community Detection Algorithm

    Louvain Community Detection Algorithm is a simple method to extract the community
    structure of a network. This is a heuristic method based on modularity optimization. [1]_

    The partitions at each level (step of the algorithm) form a dendrogram of communities.
    A dendrogram is a diagram representing a tree and each level represents
    a partition of the G graph. The top level contains the smallest communities
    and as you traverse to the bottom of the tree the communities get bigger
    and the overall modularity increases making the partition better.

    Each level is generated by executing the two phases of the Louvain Community
    Detection Algorithm.

    Be careful with self-loops in the input graph. These are treated as
    previously reduced communities -- as if the process had been started
    in the middle of the algorithm. Large self-loop edge weights thus
    represent strong communities and in practice may be hard to add
    other nodes to. If your input graph edge weights for self-loops
    do not represent already reduced communities you may want to remove
    the self-loops before inputting that graph.

    Parameters
    ----------
    G : NetworkX graph
    weight : string or None, optional (default="weight")
        The name of an edge attribute that holds the numerical value
        used as a weight. If None then each edge has weight 1.
    resolution : float, optional (default=1)
        If resolution is less than 1, the algorithm favors larger communities.
        Greater than 1 favors smaller communities
    threshold : float, optional (default=0.0000001)
        Modularity gain threshold for each level. If the gain of modularity
        between 2 levels of the algorithm is less than the given threshold
        then the algorithm stops and returns the resulting communities.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Yields
    ------
    list
        A list of sets (partition of `G`). Each set represents one community and contains
        all the nodes that constitute it.

    References
    ----------
    .. [1] Blondel, V.D. et al. Fast unfolding of communities in
     large networks. J. Stat. Mech 10008, 1-12(2008)

    See Also
    --------
    louvain_communities
    """

    # Start from the singleton partition: every node in its own community.
    partition = [{u} for u in G.nodes()]
    if nx.is_empty(G):
        # No edges: modularity is undefined, so yield the trivial partition.
        yield partition
        return
    mod = modularity(G, partition, resolution=resolution, weight=weight)
    is_directed = G.is_directed()
    if G.is_multigraph():
        # Collapse parallel edges by summing their weights.
        graph = _convert_multigraph(G, weight, is_directed)
    else:
        # Work on a copy with a canonical "weight" attribute (default 1),
        # so later levels can always read edge data under that key.
        graph = G.__class__()
        graph.add_nodes_from(G)
        graph.add_weighted_edges_from(G.edges(data=weight, default=1))

    # Total edge weight of the ORIGINAL graph; reused unchanged at every
    # level because contraction preserves total weight.
    m = graph.size(weight="weight")
    partition, inner_partition, improvement = _one_level(
        graph, m, partition, resolution, is_directed, seed
    )
    # Deliberately overwrite `improvement` so that at least one partition is
    # always yielded, even when the first level made no moves.
    improvement = True
    while improvement:
        # gh-5901 protect the sets in the yielded list from further manipulation here
        yield [s.copy() for s in partition]
        new_mod = modularity(
            graph, inner_partition, resolution=resolution, weight="weight"
        )
        # Stop once the per-level modularity gain drops to the threshold.
        if new_mod - mod <= threshold:
            return
        mod = new_mod
        # Phase 2: contract each community of this level into a single node.
        graph = _gen_graph(graph, inner_partition)
        partition, inner_partition, improvement = _one_level(
            graph, m, partition, resolution, is_directed, seed
        )
223
+
224
+
225
def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None):
    """Calculate one level of the Louvain partitions tree

    Parameters
    ----------
    G : NetworkX Graph/DiGraph
        The graph from which to detect communities
    m : number
        The size of the graph `G`.
    partition : list of sets of nodes
        A valid partition of the graph `G`
    resolution : positive number
        The resolution parameter for computing the modularity of a partition
    is_directed : bool
        True if `G` is a directed graph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    """
    # Map each node to its current community index; start with singletons.
    node2com = {u: i for i, u in enumerate(G.nodes())}
    inner_partition = [{u} for u in G.nodes()]
    if is_directed:
        in_degrees = dict(G.in_degree(weight="weight"))
        out_degrees = dict(G.out_degree(weight="weight"))
        # Per-community totals of in/out weighted degree, indexed by
        # community number (initially one community per node, in node order).
        Stot_in = list(in_degrees.values())
        Stot_out = list(out_degrees.values())
        # Calculate weights for both in and out neighbors without considering self-loops
        nbrs = {}
        for u in G:
            nbrs[u] = defaultdict(float)
            for _, n, wt in G.out_edges(u, data="weight"):
                if u != n:
                    nbrs[u][n] += wt
            for n, _, wt in G.in_edges(u, data="weight"):
                if u != n:
                    nbrs[u][n] += wt
    else:
        degrees = dict(G.degree(weight="weight"))
        # Per-community total weighted degree, indexed by community number.
        Stot = list(degrees.values())
        # Neighbor weights, excluding self-loops.
        nbrs = {u: {v: data["weight"] for v, data in G[u].items() if v != u} for u in G}
    # Visit nodes in a seeded random order; the order can affect the result.
    rand_nodes = list(G.nodes)
    seed.shuffle(rand_nodes)
    nb_moves = 1
    improvement = False
    # Repeat full sweeps over all nodes until a sweep makes no moves.
    while nb_moves > 0:
        nb_moves = 0
        for u in rand_nodes:
            best_mod = 0
            best_com = node2com[u]
            # Total edge weight from u to each neighboring community.
            weights2com = _neighbor_weights(nbrs[u], node2com)
            if is_directed:
                in_degree = in_degrees[u]
                out_degree = out_degrees[u]
                # Temporarily remove u from its community so gains are
                # computed as if u were isolated.
                Stot_in[best_com] -= in_degree
                Stot_out[best_com] -= out_degree
                # Cost of removing u from its current community (directed form).
                remove_cost = (
                    -weights2com[best_com] / m
                    + resolution
                    * (out_degree * Stot_in[best_com] + in_degree * Stot_out[best_com])
                    / m**2
                )
            else:
                degree = degrees[u]
                Stot[best_com] -= degree
                # Cost of removing u from its current community (undirected form).
                remove_cost = -weights2com[best_com] / m + resolution * (
                    Stot[best_com] * degree
                ) / (2 * m**2)
            # Evaluate moving u into each neighboring community; keep the best
            # strictly positive gain.
            for nbr_com, wt in weights2com.items():
                if is_directed:
                    gain = (
                        remove_cost
                        + wt / m
                        - resolution
                        * (
                            out_degree * Stot_in[nbr_com]
                            + in_degree * Stot_out[nbr_com]
                        )
                        / m**2
                    )
                else:
                    gain = (
                        remove_cost
                        + wt / m
                        - resolution * (Stot[nbr_com] * degree) / (2 * m**2)
                    )
                if gain > best_mod:
                    best_mod = gain
                    best_com = nbr_com
            # Re-insert u's degree into the chosen community's totals
            # (best_com may be the original community if no move helps).
            if is_directed:
                Stot_in[best_com] += in_degree
                Stot_out[best_com] += out_degree
            else:
                Stot[best_com] += degree
            if best_com != node2com[u]:
                # u may itself represent a set of original nodes from an
                # earlier contraction; move the whole set in `partition`.
                com = G.nodes[u].get("nodes", {u})
                partition[node2com[u]].difference_update(com)
                inner_partition[node2com[u]].remove(u)
                partition[best_com].update(com)
                inner_partition[best_com].add(u)
                improvement = True
                nb_moves += 1
                node2com[u] = best_com
    # Drop communities that were emptied by the moves.
    partition = list(filter(len, partition))
    inner_partition = list(filter(len, inner_partition))
    return partition, inner_partition, improvement
331
+
332
+
333
+ def _neighbor_weights(nbrs, node2com):
334
+ """Calculate weights between node and its neighbor communities.
335
+
336
+ Parameters
337
+ ----------
338
+ nbrs : dictionary
339
+ Dictionary with nodes' neighbors as keys and their edge weight as value.
340
+ node2com : dictionary
341
+ Dictionary with all graph's nodes as keys and their community index as value.
342
+
343
+ """
344
+ weights = defaultdict(float)
345
+ for nbr, wt in nbrs.items():
346
+ weights[node2com[nbr]] += wt
347
+ return weights
348
+
349
+
350
def _gen_graph(G, partition):
    """Contract each community of ``partition`` into a single node of a new graph."""
    coarse = G.__class__()
    community_of = {}
    for com_idx, community in enumerate(partition):
        members = set()
        for node in community:
            community_of[node] = com_idx
            # A node may already stand for a set of original nodes from a
            # previous contraction; carry the full membership forward.
            members.update(G.nodes[node].get("nodes", {node}))
        coarse.add_node(com_idx, nodes=members)

    # Sum the weights of all edges running between each pair of communities
    # (intra-community edges become self-loops on the contracted node).
    for u, v, data in G.edges(data=True):
        cu = community_of[u]
        cv = community_of[v]
        accumulated = coarse.get_edge_data(cu, cv, {"weight": 0})["weight"]
        coarse.add_edge(cu, cv, weight=data["weight"] + accumulated)
    return coarse
368
+
369
+
370
def _convert_multigraph(G, weight, is_directed):
    """Flatten a multigraph into a simple (Di)Graph, summing parallel-edge weights."""
    H = nx.DiGraph() if is_directed else nx.Graph()
    H.add_nodes_from(G)
    for u, v, wt in G.edges(data=weight, default=1):
        existing = H.get_edge_data(u, v)
        if existing is None:
            H.add_edge(u, v, weight=wt)
        else:
            # Parallel edge: accumulate its weight onto the single edge.
            existing["weight"] += wt
    return H
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (200 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-310.pyc ADDED
Binary file (3.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc ADDED
Binary file (3.37 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-310.pyc ADDED
Binary file (3.71 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-310.pyc ADDED
Binary file (2.94 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-310.pyc ADDED
Binary file (3.07 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-310.pyc ADDED
Binary file (10.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-310.pyc ADDED
Binary file (7.33 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc ADDED
Binary file (3.55 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-310.pyc ADDED
Binary file (8.08 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc ADDED
Binary file (4.66 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-310.pyc ADDED
Binary file (1.09 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from .connected import *
2
+ from .strongly_connected import *
3
+ from .weakly_connected import *
4
+ from .attracting import *
5
+ from .biconnected import *
6
+ from .semiconnected import *
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (349 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-310.pyc ADDED
Binary file (3.11 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-310.pyc ADDED
Binary file (11.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-310.pyc ADDED
Binary file (4.82 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-310.pyc ADDED
Binary file (2.42 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-310.pyc ADDED
Binary file (4.47 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/attracting.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Attracting components."""
2
+ import networkx as nx
3
+ from networkx.utils.decorators import not_implemented_for
4
+
5
+ __all__ = [
6
+ "number_attracting_components",
7
+ "attracting_components",
8
+ "is_attracting_component",
9
+ ]
10
+
11
+
12
@not_implemented_for("undirected")
@nx._dispatchable
def attracting_components(G):
    """Generates the attracting components in `G`.

    An attracting component in a directed graph `G` is a strongly connected
    component with the property that a random walker on the graph will never
    leave the component, once it enters the component.  The nodes in
    attracting components can also be thought of as recurrent nodes: if a
    random walker enters the attractor containing the node, the node will be
    visited infinitely often.

    To obtain induced subgraphs on each component use:
    ``(G.subgraph(c).copy() for c in attracting_components(G))``

    Parameters
    ----------
    G : DiGraph, MultiDiGraph
        The graph to be analyzed.

    Returns
    -------
    attractors : generator of sets
        A generator of sets of nodes, one for each attracting component of G.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is undirected.

    See Also
    --------
    number_attracting_components
    is_attracting_component

    """
    sccs = list(nx.strongly_connected_components(G))
    condensed = nx.condensation(G, sccs)
    # Sinks of the condensation DAG (no outgoing edges) are exactly the
    # strongly connected components a walk can never leave.
    for idx in condensed:
        if condensed.out_degree(idx) == 0:
            yield sccs[idx]
+
55
+
56
+ @not_implemented_for("undirected")
57
+ @nx._dispatchable
58
+ def number_attracting_components(G):
59
+ """Returns the number of attracting components in `G`.
60
+
61
+ Parameters
62
+ ----------
63
+ G : DiGraph, MultiDiGraph
64
+ The graph to be analyzed.
65
+
66
+ Returns
67
+ -------
68
+ n : int
69
+ The number of attracting components in G.
70
+
71
+ Raises
72
+ ------
73
+ NetworkXNotImplemented
74
+ If the input graph is undirected.
75
+
76
+ See Also
77
+ --------
78
+ attracting_components
79
+ is_attracting_component
80
+
81
+ """
82
+ return sum(1 for ac in attracting_components(G))
83
+
84
+
85
+ @not_implemented_for("undirected")
86
+ @nx._dispatchable
87
+ def is_attracting_component(G):
88
+ """Returns True if `G` consists of a single attracting component.
89
+
90
+ Parameters
91
+ ----------
92
+ G : DiGraph, MultiDiGraph
93
+ The graph to be analyzed.
94
+
95
+ Returns
96
+ -------
97
+ attracting : bool
98
+ True if `G` has a single attracting component. Otherwise, False.
99
+
100
+ Raises
101
+ ------
102
+ NetworkXNotImplemented
103
+ If the input graph is undirected.
104
+
105
+ See Also
106
+ --------
107
+ attracting_components
108
+ number_attracting_components
109
+
110
+ """
111
+ ac = list(attracting_components(G))
112
+ if len(ac) == 1:
113
+ return len(ac[0]) == len(G)
114
+ return False
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/biconnected.py ADDED
@@ -0,0 +1,393 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Biconnected components and articulation points."""
2
+ from itertools import chain
3
+
4
+ import networkx as nx
5
+ from networkx.utils.decorators import not_implemented_for
6
+
7
+ __all__ = [
8
+ "biconnected_components",
9
+ "biconnected_component_edges",
10
+ "is_biconnected",
11
+ "articulation_points",
12
+ ]
13
+
14
+
15
@not_implemented_for("directed")
@nx._dispatchable
def is_biconnected(G):
    """Returns True if the graph is biconnected, False otherwise.

    A graph is biconnected if, and only if, it cannot be disconnected by
    removing only one node (and all edges incident on that node). If
    removing a node increases the number of disconnected components
    in the graph, that node is called an articulation point, or cut
    vertex. A biconnected graph has no articulation points.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    Returns
    -------
    biconnected : bool
        True if the graph is biconnected, False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is not undirected.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> print(nx.is_biconnected(G))
    False
    >>> G.add_edge(0, 3)
    >>> print(nx.is_biconnected(G))
    True

    See Also
    --------
    biconnected_components
    articulation_points
    biconnected_component_edges
    is_strongly_connected
    is_weakly_connected
    is_connected
    is_semiconnected

    Notes
    -----
    The algorithm to find articulation points and biconnected
    components is implemented using a non-recursive depth-first-search
    (DFS) that keeps track of the highest level that back edges reach
    in the DFS tree. A node `n` is an articulation point if, and only
    if, there exists a subtree rooted at `n` such that there is no
    back edge from any successor of `n` that links to a predecessor of
    `n` in the DFS tree. By keeping track of all the edges traversed
    by the DFS we can obtain the biconnected components because all
    edges of a bicomponent will be traversed consecutively between
    articulation points.

    References
    ----------
    .. [1] Hopcroft, J.; Tarjan, R. (1973).
       "Efficient algorithms for graph manipulation".
       Communications of the ACM 16: 372–378. doi:10.1145/362248.362272

    """
    bicomponents = biconnected_components(G)
    first = next(bicomponents, None)
    if first is None:
        # No bicomponents at all (e.g. an empty graph).
        return False
    if next(bicomponents, None) is not None:
        # More than one bicomponent implies an articulation point exists.
        return False
    # Exactly one bicomponent: biconnected iff it spans every node.
    return len(first) == len(G)
94
+
95
+
96
@not_implemented_for("directed")
@nx._dispatchable
def biconnected_component_edges(G):
    """Yield the edges of each biconnected component of `G`, one list per
    component.

    A biconnected component is a maximal subgraph that remains connected
    after the removal of any single node (and all edges incident on that
    node).  Nodes may belong to several components — those nodes are the
    articulation points (cut vertices) — but every edge belongs to exactly
    one biconnected component.  By convention a dyad (a single edge) is
    considered a biconnected component.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    Returns
    -------
    edges : generator of lists
        Generator of lists of edges, one list for each bicomponent.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is not undirected.

    Examples
    --------
    >>> G = nx.barbell_graph(4, 2)
    >>> print(nx.is_biconnected(G))
    False
    >>> bicomponents_edges = list(nx.biconnected_component_edges(G))
    >>> len(bicomponents_edges)
    5
    >>> G.add_edge(2, 8)
    >>> print(nx.is_biconnected(G))
    True
    >>> bicomponents_edges = list(nx.biconnected_component_edges(G))
    >>> len(bicomponents_edges)
    1

    See Also
    --------
    is_biconnected
    biconnected_components
    articulation_points

    Notes
    -----
    Uses a non-recursive depth-first search that tracks the highest level
    reached by back edges; the edges traversed by the DFS are recorded so
    that each bicomponent's edges come out as one consecutive run between
    articulation points.

    References
    ----------
    .. [1] Hopcroft, J.; Tarjan, R. (1973).
       "Efficient algorithms for graph manipulation".
       Communications of the ACM 16: 372–378. doi:10.1145/362248.362272

    """
    # The DFS helper emits each component's edge list as the component
    # is closed off; pass every list through unchanged.
    for component_edges in _biconnected_dfs(G, components=True):
        yield component_edges
167
+
168
+
169
@not_implemented_for("directed")
@nx._dispatchable
def biconnected_components(G):
    """Yield the node sets of the biconnected components of `G`, one set
    per component.

    A biconnected component is a maximal subgraph that remains connected
    after the removal of any single node (and all edges incident on that
    node).  Nodes may belong to more than one component — those are the
    articulation points (cut vertices), whose removal increases the number
    of connected components.  By convention a dyad (a single edge) is
    considered a biconnected component.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    Returns
    -------
    nodes : generator
        Generator of sets of nodes, one set for each biconnected component.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is not undirected.

    Examples
    --------
    >>> G = nx.lollipop_graph(5, 1)
    >>> print(nx.is_biconnected(G))
    False
    >>> bicomponents = list(nx.biconnected_components(G))
    >>> len(bicomponents)
    2
    >>> G.add_edge(0, 5)
    >>> print(nx.is_biconnected(G))
    True
    >>> bicomponents = list(nx.biconnected_components(G))
    >>> len(bicomponents)
    1

    You can generate a sorted list of biconnected components, largest
    first, using sort.

    >>> G.remove_edge(0, 5)
    >>> [len(c) for c in sorted(nx.biconnected_components(G), key=len, reverse=True)]
    [5, 2]

    If you only want the largest connected component, it's more
    efficient to use max instead of sort.

    >>> Gc = max(nx.biconnected_components(G), key=len)

    To create the components as subgraphs use:
    ``(G.subgraph(c).copy() for c in biconnected_components(G))``

    See Also
    --------
    is_biconnected
    articulation_points
    biconnected_component_edges
    k_components : this function is a special case where k=2
    bridge_components : similar to this function, but is defined using
        2-edge-connectivity instead of 2-node-connectivity.

    Notes
    -----
    Uses a non-recursive depth-first search that tracks the highest level
    reached by back edges; all edges of one bicomponent are traversed
    consecutively between articulation points, so each emitted edge run is
    flattened into its node set.

    References
    ----------
    .. [1] Hopcroft, J.; Tarjan, R. (1973).
       "Efficient algorithms for graph manipulation".
       Communications of the ACM 16: 372–378. doi:10.1145/362248.362272

    """
    # Collect the endpoints of every edge in the component into one set.
    for component_edges in _biconnected_dfs(G, components=True):
        component_nodes = set()
        for u, v in component_edges:
            component_nodes.add(u)
            component_nodes.add(v)
        yield component_nodes
260
+
261
+
262
@not_implemented_for("directed")
@nx._dispatchable
def articulation_points(G):
    """Yield the articulation points, or cut vertices, of a graph.

    An articulation point (cut vertex) is any node whose removal, along
    with all its incident edges, increases the number of connected
    components of a graph.  An undirected connected graph without
    articulation points is biconnected.  Articulation points belong to
    more than one biconnected component of a graph.

    Notice that by convention a dyad is considered a biconnected component.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    Yields
    ------
    node
        An articulation point in the graph.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is not undirected.

    Examples
    --------

    >>> G = nx.barbell_graph(4, 2)
    >>> print(nx.is_biconnected(G))
    False
    >>> len(list(nx.articulation_points(G)))
    4
    >>> G.add_edge(2, 8)
    >>> print(nx.is_biconnected(G))
    True
    >>> len(list(nx.articulation_points(G)))
    0

    See Also
    --------
    is_biconnected
    biconnected_components
    biconnected_component_edges

    Notes
    -----
    Uses a non-recursive depth-first search that tracks the highest level
    reached by back edges; a node is reported when some subtree below it
    has no back edge reaching above it.  The underlying DFS may report a
    node more than once, so duplicates are filtered here.

    References
    ----------
    .. [1] Hopcroft, J.; Tarjan, R. (1973).
       "Efficient algorithms for graph manipulation".
       Communications of the ACM 16: 372–378. doi:10.1145/362248.362272

    """
    # The DFS can yield the same cut vertex once per qualifying subtree;
    # emit each node only the first time it appears.
    emitted = set()
    for node in _biconnected_dfs(G, components=False):
        if node in emitted:
            continue
        emitted.add(node)
        yield node
335
+
336
+
337
@not_implemented_for("directed")
def _biconnected_dfs(G, components=True):
    # Iterative (stack-based) Hopcroft--Tarjan depth-first search.
    # If ``components`` is True, yields the edge list of each biconnected
    # component as it is completed; otherwise yields articulation points
    # (a node may be yielded more than once -- callers deduplicate).
    visited = set()
    for start in G:
        if start in visited:
            continue
        discovery = {start: 0}  # time of first discovery of node during search
        low = {start: 0}  # low[v]: earliest discovery time reachable from v's subtree
        root_children = 0  # number of DFS-tree children of the root `start`
        visited.add(start)
        edge_stack = []  # traversed edges; one component is a contiguous slice
        stack = [(start, start, iter(G[start]))]  # (grandparent, parent, neighbor iterator)
        edge_index = {}  # position of each recorded edge within edge_stack
        while stack:
            grandparent, parent, children = stack[-1]
            try:
                child = next(children)
                if grandparent == child:
                    # Skip the edge leading back to the DFS-tree parent.
                    continue
                if child in visited:
                    if discovery[child] <= discovery[parent]:  # back edge
                        low[parent] = min(low[parent], discovery[child])
                        if components:
                            edge_index[parent, child] = len(edge_stack)
                            edge_stack.append((parent, child))
                else:
                    # Tree edge: descend into the newly discovered child.
                    low[child] = discovery[child] = len(discovery)
                    visited.add(child)
                    stack.append((parent, child, iter(G[child])))
                    if components:
                        edge_index[parent, child] = len(edge_stack)
                        edge_stack.append((parent, child))

            except StopIteration:
                # All neighbors of `parent` processed; retreat up the tree.
                stack.pop()
                if len(stack) > 1:
                    if low[parent] >= discovery[grandparent]:
                        # No back edge from parent's subtree climbs above
                        # `grandparent`, so `grandparent` is a cut vertex and
                        # the edges pushed since (grandparent, parent) form
                        # one biconnected component.
                        if components:
                            ind = edge_index[grandparent, parent]
                            yield edge_stack[ind:]
                            del edge_stack[ind:]

                        else:
                            yield grandparent
                    low[grandparent] = min(low[parent], low[grandparent])
                elif stack:  # length 1 so grandparent is root
                    root_children += 1
                    if components:
                        ind = edge_index[grandparent, parent]
                        yield edge_stack[ind:]
                        del edge_stack[ind:]
        if not components:
            # root node is articulation point if it has more than 1 child
            if root_children > 1:
                yield start
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/connected.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Connected components."""
2
+ import networkx as nx
3
+ from networkx.utils.decorators import not_implemented_for
4
+
5
+ from ...utils import arbitrary_element
6
+
7
+ __all__ = [
8
+ "number_connected_components",
9
+ "connected_components",
10
+ "is_connected",
11
+ "node_connected_component",
12
+ ]
13
+
14
+
15
@not_implemented_for("directed")
@nx._dispatchable
def connected_components(G):
    """Generate connected components.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    Generate a sorted list of connected components, largest first.

    >>> G = nx.path_graph(4)
    >>> nx.add_path(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.connected_components(G), key=len, reverse=True)]
    [4, 3]

    If you only want the largest connected component, it's more
    efficient to use max instead of sort.

    >>> largest_cc = max(nx.connected_components(G), key=len)

    To create the induced subgraph of each component use:

    >>> S = [G.subgraph(c).copy() for c in nx.connected_components(G)]

    See Also
    --------
    strongly_connected_components
    weakly_connected_components

    Notes
    -----
    For undirected graphs only.

    """
    # Each BFS sweep discovers one whole component; mark its nodes so the
    # outer scan starts a new search only from still-unreached nodes.
    reached = set()
    for node in G:
        if node in reached:
            continue
        component = _plain_bfs(G, node)
        reached.update(component)
        yield component
69
+
70
+
71
@not_implemented_for("directed")
@nx._dispatchable
def number_connected_components(G):
    """Returns the number of connected components.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    n : integer
        Number of connected components

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)])
    >>> nx.number_connected_components(G)
    3

    See Also
    --------
    connected_components
    number_weakly_connected_components
    number_strongly_connected_components

    Notes
    -----
    For undirected graphs only.

    """
    # Count the components lazily without materializing any of them.
    count = 0
    for _ in connected_components(G):
        count += 1
    return count
109
+
110
+
111
@not_implemented_for("directed")
@nx._dispatchable
def is_connected(G):
    """Returns True if the graph is connected, False otherwise.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    Returns
    -------
    connected : bool
        True if the graph is connected, false otherwise.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> print(nx.is_connected(G))
    True

    See Also
    --------
    is_strongly_connected
    is_weakly_connected
    is_semiconnected
    is_biconnected
    connected_components

    Notes
    -----
    For undirected graphs only.

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "Connectivity is undefined for the null graph."
        )
    # Connected iff a single BFS from any node reaches every node.
    reached = _plain_bfs(G, arbitrary_element(G))
    return len(reached) == len(G)
155
+
156
+
157
@not_implemented_for("directed")
@nx._dispatchable
def node_connected_component(G, n):
    """Returns the set of nodes in the component of graph containing node n.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph.

    n : node label
        A node in G

    Returns
    -------
    comp : set
        A set of nodes in the component of G containing node n.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)])
    >>> nx.node_connected_component(G, 0)  # nodes of component that contains node 0
    {0, 1, 2}

    See Also
    --------
    connected_components

    Notes
    -----
    For undirected graphs only.

    """
    # A breadth-first sweep from `n` visits exactly its component.
    component = _plain_bfs(G, n)
    return component
196
+
197
+
198
+ def _plain_bfs(G, source):
199
+ """A fast BFS node generator"""
200
+ adj = G._adj
201
+ n = len(adj)
202
+ seen = {source}
203
+ nextlevel = [source]
204
+ while nextlevel:
205
+ thislevel = nextlevel
206
+ nextlevel = []
207
+ for v in thislevel:
208
+ for w in adj[v]:
209
+ if w not in seen:
210
+ seen.add(w)
211
+ nextlevel.append(w)
212
+ if len(seen) == n:
213
+ return seen
214
+ return seen
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/semiconnected.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Semiconnectedness."""
2
+ import networkx as nx
3
+ from networkx.utils import not_implemented_for, pairwise
4
+
5
+ __all__ = ["is_semiconnected"]
6
+
7
+
8
@not_implemented_for("undirected")
@nx._dispatchable
def is_semiconnected(G):
    r"""Returns True if the graph is semiconnected, False otherwise.

    A graph is semiconnected if and only if for any pair of nodes, either
    one is reachable from the other, or they are mutually reachable.

    This function uses a theorem that states that a DAG is semiconnected
    if for any topological sort, for node $v_n$ in that sort, there is an
    edge $(v_i, v_{i+1})$.  A non-DAG `G` is therefore checked through its
    condensation: the DAG `H` whose nodes are the strongly connected
    components of `G`, with an edge between two components whenever `G`
    has an edge between any of their members.  `G` is semiconnected
    exactly when consecutive components in a topological sort of `H` are
    always joined by an edge.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    semiconnected : bool
        True if the graph is semiconnected, False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is undirected.

    NetworkXPointlessConcept
        If the graph is empty.

    Examples
    --------
    >>> G = nx.path_graph(4, create_using=nx.DiGraph())
    >>> print(nx.is_semiconnected(G))
    True
    >>> G = nx.DiGraph([(1, 2), (3, 2)])
    >>> print(nx.is_semiconnected(G))
    False

    See Also
    --------
    is_strongly_connected
    is_weakly_connected
    is_connected
    is_biconnected
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "Connectivity is undefined for the null graph."
        )

    # A semiconnected graph is necessarily weakly connected; bail out early.
    if not nx.is_weakly_connected(G):
        return False

    condensed = nx.condensation(G)
    ordering = nx.topological_sort(condensed)
    return all(condensed.has_edge(u, v) for u, v in pairwise(ordering))
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/strongly_connected.py ADDED
@@ -0,0 +1,430 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Strongly connected components."""
2
+ import networkx as nx
3
+ from networkx.utils.decorators import not_implemented_for
4
+
5
+ __all__ = [
6
+ "number_strongly_connected_components",
7
+ "strongly_connected_components",
8
+ "is_strongly_connected",
9
+ "strongly_connected_components_recursive",
10
+ "kosaraju_strongly_connected_components",
11
+ "condensation",
12
+ ]
13
+
14
+
15
@not_implemented_for("undirected")
@nx._dispatchable
def strongly_connected_components(G):
    """Generate nodes in strongly connected components of graph.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True)]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.strongly_connected_components(G), key=len)

    See Also
    --------
    connected_components
    weakly_connected_components
    kosaraju_strongly_connected_components

    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
    Nonrecursive version of algorithm.

    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).

    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..

    """
    preorder = {}  # preorder[v]: DFS discovery number of v
    lowlink = {}  # lowlink[v]: smallest preorder reachable from v's subtree
    scc_found = set()  # nodes already assigned to an emitted SCC
    scc_queue = []  # Nuutila's stack of potential SCC members
    i = 0  # Preorder counter
    # Pre-built neighbor iterators let the DFS resume mid-adjacency-list
    # when a node is revisited on the explicit stack.
    neighbors = {v: iter(G[v]) for v in G}
    for source in G:
        if source not in scc_found:
            queue = [source]  # explicit DFS stack (iterative, not recursive)
            while queue:
                v = queue[-1]
                if v not in preorder:
                    i = i + 1
                    preorder[v] = i
                done = True
                for w in neighbors[v]:
                    if w not in preorder:
                        # Descend into the first unvisited successor and
                        # revisit v later to continue its adjacency list.
                        queue.append(w)
                        done = False
                        break
                if done:
                    # All successors explored: compute lowlink from them.
                    lowlink[v] = preorder[v]
                    for w in G[v]:
                        if w not in scc_found:
                            if preorder[w] > preorder[v]:
                                lowlink[v] = min([lowlink[v], lowlink[w]])
                            else:
                                lowlink[v] = min([lowlink[v], preorder[w]])
                    queue.pop()
                    if lowlink[v] == preorder[v]:
                        # v is an SCC root: pop every later-discovered node
                        # still on Nuutila's stack into this component.
                        scc = {v}
                        while scc_queue and preorder[scc_queue[-1]] > preorder[v]:
                            k = scc_queue.pop()
                            scc.add(k)
                        scc_found.update(scc)
                        yield scc
                    else:
                        scc_queue.append(v)
109
+
110
+
111
@not_implemented_for("undirected")
@nx._dispatchable
def kosaraju_strongly_connected_components(G, source=None):
    """Generate nodes in strongly connected components of graph.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(
    ...         nx.kosaraju_strongly_connected_components(G), key=len, reverse=True
    ...     )
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len)

    See Also
    --------
    strongly_connected_components

    Notes
    -----
    Uses Kosaraju's algorithm.

    """
    # First pass: DFS postorder on the reversed graph (no copy is made).
    order = list(nx.dfs_postorder_nodes(G.reverse(copy=False), source=source))

    # Second pass: DFS on G in reverse postorder; each sweep over
    # not-yet-assigned nodes is exactly one strongly connected component.
    assigned = set()
    while order:
        root = order.pop()
        if root in assigned:
            continue
        reachable = nx.dfs_preorder_nodes(G, root)
        component = {node for node in reachable if node not in assigned}
        assigned.update(component)
        yield component
171
+
172
+
173
@not_implemented_for("undirected")
@nx._dispatchable
def strongly_connected_components_recursive(G):
    """Generate nodes in strongly connected components of graph.

    .. deprecated:: 3.2

       This function is deprecated and will be removed in a future version of
       NetworkX. Use `strongly_connected_components` instead.

    Recursive version of algorithm.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(
    ...         nx.strongly_connected_components_recursive(G), key=len, reverse=True
    ...     )
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.strongly_connected_components_recursive(G), key=len)

    To create the induced subgraph of the components use:
    >>> S = [G.subgraph(c).copy() for c in nx.weakly_connected_components(G)]

    See Also
    --------
    connected_components

    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.

    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).

    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..

    """
    import warnings

    # Warn callers once per call site; the recursive variant was dropped
    # and this name now delegates to the iterative implementation.
    warnings.warn(
        (
            "\n\nstrongly_connected_components_recursive is deprecated and will be\n"
            "removed in the future. Use strongly_connected_components instead."
        ),
        category=DeprecationWarning,
        stacklevel=2,
    )

    for component in strongly_connected_components(G):
        yield component
253
+
254
+
255
@not_implemented_for("undirected")
@nx._dispatchable
def number_strongly_connected_components(G):
    """Returns number of strongly connected components in graph.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    n : integer
        Number of strongly connected components

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph(
    ...     [(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)]
    ... )
    >>> nx.number_strongly_connected_components(G)
    3

    See Also
    --------
    strongly_connected_components
    number_connected_components
    number_weakly_connected_components

    Notes
    -----
    For directed graphs only.
    """
    # Count components one at a time without keeping any of them.
    count = 0
    for _ in strongly_connected_components(G):
        count += 1
    return count
294
+
295
+
296
@not_implemented_for("undirected")
@nx._dispatchable
def is_strongly_connected(G):
    """Test directed graph for strong connectivity.

    A directed graph is strongly connected if and only if every vertex in
    the graph is reachable from every other vertex.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    connected : bool
        True if the graph is strongly connected, False otherwise.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0), (2, 4), (4, 2)])
    >>> nx.is_strongly_connected(G)
    True
    >>> G.remove_edge(2, 3)
    >>> nx.is_strongly_connected(G)
    False

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    See Also
    --------
    is_weakly_connected
    is_semiconnected
    is_connected
    is_biconnected
    strongly_connected_components

    Notes
    -----
    For directed graphs only.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "Connectivity is undefined for the null graph."
        )

    # Strongly connected iff the first SCC found already covers all nodes.
    first_scc = next(strongly_connected_components(G))
    return len(first_scc) == len(G)
346
+
347
+
348
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def condensation(G, scc=None):
    """Returns the condensation of G.

    The condensation of G is the graph with each of the strongly connected
    components contracted into a single node.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph.

    scc: list or generator (optional, default=None)
        Strongly connected components. If provided, the elements in
        `scc` must partition the nodes in `G`. If not provided, it will be
        calculated as scc=nx.strongly_connected_components(G).

    Returns
    -------
    C : NetworkX DiGraph
        The condensation graph C of G. The node labels are integers
        corresponding to the index of the component in the list of
        strongly connected components of G. C has a graph attribute named
        'mapping' with a dictionary mapping the original nodes to the
        nodes in C to which they belong. Each node in C also has a node
        attribute 'members' with the set of original nodes in G that
        form the SCC that the node in C represents.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Contracting two sets of strongly connected nodes into two distinct SCC
    using the barbell graph.

    >>> G = nx.barbell_graph(4, 0)
    >>> G.remove_edge(3, 4)
    >>> G = nx.DiGraph(G)
    >>> H = nx.condensation(G)
    >>> H.nodes.data()
    NodeDataView({0: {'members': {0, 1, 2, 3}}, 1: {'members': {4, 5, 6, 7}}})
    >>> H.graph["mapping"]
    {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1}

    Contracting a complete graph into one single SCC.

    >>> G = nx.complete_graph(7, create_using=nx.DiGraph)
    >>> H = nx.condensation(G)
    >>> H.nodes
    NodeView((0,))
    >>> H.nodes.data()
    NodeDataView({0: {'members': {0, 1, 2, 3, 4, 5, 6}}})

    Notes
    -----
    After contracting all strongly connected components to a single node,
    the resulting graph is a directed acyclic graph.

    """
    if scc is None:
        scc = nx.strongly_connected_components(G)
    mapping = {}
    members = {}
    C = nx.DiGraph()
    # Add mapping dict as graph attribute
    C.graph["mapping"] = mapping
    if len(G) == 0:
        return C
    # Number the components 0..k-1 and record which component each
    # original node belongs to.
    component_count = 0
    for index, component in enumerate(scc):
        members[index] = component
        for node in component:
            mapping[node] = index
        component_count = index + 1
    C.add_nodes_from(range(component_count))
    # Keep only edges that cross between different components.
    C.add_edges_from(
        (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v]
    )
    # Add a list of members (ie original nodes) to each node (ie scc) in C.
    nx.set_node_attributes(C, members, "members")
    return C
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (201 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-310.pyc ADDED
Binary file (2.76 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-310.pyc ADDED
Binary file (6.72 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-310.pyc ADDED
Binary file (4.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-310.pyc ADDED
Binary file (3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-310.pyc ADDED
Binary file (7.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-310.pyc ADDED
Binary file (3.99 kB). View file
 
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_attracting.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+
6
+
7
class TestAttractingComponents:
    """Tests for attracting-component algorithms on directed graphs."""

    @classmethod
    def setup_class(cls):
        # G1: sinks 2, 9 and 10 are each their own attracting component
        # (asserted in test_attracting_components below).
        cls.G1 = nx.DiGraph()
        cls.G1.add_edges_from(
            [
                (5, 11),
                (11, 2),
                (11, 9),
                (11, 10),
                (7, 11),
                (7, 8),
                (8, 9),
                (3, 8),
                (3, 10),
            ]
        )
        # G2: nodes 1 and 2 reach each other and nothing leaves {1, 2},
        # so it is the single attracting component.
        cls.G2 = nx.DiGraph()
        cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)])

        # G3: two terminal 2-cycles, {1, 2} and {3, 4}.
        cls.G3 = nx.DiGraph()
        cls.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)])

        # G4: empty digraph -- expected to have no attracting components.
        cls.G4 = nx.DiGraph()

    def test_attracting_components(self):
        ac = list(nx.attracting_components(self.G1))
        assert {2} in ac
        assert {9} in ac
        assert {10} in ac

        ac = list(nx.attracting_components(self.G2))
        ac = [tuple(sorted(x)) for x in ac]
        assert ac == [(1, 2)]

        ac = list(nx.attracting_components(self.G3))
        ac = [tuple(sorted(x)) for x in ac]
        assert (1, 2) in ac
        assert (3, 4) in ac
        assert len(ac) == 2

        ac = list(nx.attracting_components(self.G4))
        assert ac == []

    def test_number_attacting_components(self):
        assert nx.number_attracting_components(self.G1) == 3
        assert nx.number_attracting_components(self.G2) == 1
        assert nx.number_attracting_components(self.G3) == 2
        assert nx.number_attracting_components(self.G4) == 0

    def test_is_attracting_component(self):
        # Whole graphs with nodes outside an attracting set are not
        # themselves attracting components...
        assert not nx.is_attracting_component(self.G1)
        assert not nx.is_attracting_component(self.G2)
        assert not nx.is_attracting_component(self.G3)
        # ...but the subgraph induced by one terminal cycle is.
        g2 = self.G3.subgraph([1, 2])
        assert nx.is_attracting_component(g2)
        assert not nx.is_attracting_component(self.G4)

    def test_connected_raise(self):
        # All three functions are directed-only and must reject nx.Graph.
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.attracting_components(G))
        pytest.raises(NetworkXNotImplemented, nx.number_attracting_components, G)
        pytest.raises(NetworkXNotImplemented, nx.is_attracting_component, G)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_biconnected.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+
6
+
7
def assert_components_edges_equal(x, y):
    """Assert two collections of edge lists are equal, ignoring the order of
    components, the order of edges within a component, and edge direction."""

    def canonical(components):
        # Each component becomes a frozenset of undirected (frozenset) edges.
        return {frozenset(frozenset(edge) for edge in comp) for comp in components}

    assert canonical(x) == canonical(y)
11
+
12
+
13
def assert_components_equal(x, y):
    """Assert two collections of node collections are equal as unordered
    sets of node sets."""

    def canonical(components):
        return {frozenset(comp) for comp in components}

    assert canonical(x) == canonical(y)
17
+
18
+
19
def test_barbell():
    # Barbell graph with a dangling path 7-20-21-22 and a cycle 22-23-24-25
    # attached; every node on the connecting paths is a cut vertex.
    G = nx.barbell_graph(8, 4)
    nx.add_path(G, [7, 20, 21, 22])
    nx.add_cycle(G, [22, 23, 24, 25])
    pts = set(nx.articulation_points(G))
    assert pts == {7, 8, 9, 10, 11, 12, 20, 21, 22}

    # Expected biconnected components: the two dense ends, the attached
    # cycle, and one 2-node component per bridge edge.
    answer = [
        {12, 13, 14, 15, 16, 17, 18, 19},
        {0, 1, 2, 3, 4, 5, 6, 7},
        {22, 23, 24, 25},
        {11, 12},
        {10, 11},
        {9, 10},
        {8, 9},
        {7, 8},
        {21, 22},
        {20, 21},
        {7, 20},
    ]
    assert_components_equal(list(nx.biconnected_components(G)), answer)

    # A chord between the two ends removes the cut vertices on the bar,
    # leaving only the attachment points of the path/cycle appendage.
    G.add_edge(2, 17)
    pts = set(nx.articulation_points(G))
    assert pts == {7, 20, 21, 22}
44
+
45
+
46
def test_articulation_points_repetitions():
    """An articulation point shared by several branches is yielded once."""
    star = nx.Graph()
    star.add_edges_from([(0, 1), (1, 2), (1, 3)])
    assert list(nx.articulation_points(star)) == [1]
50
+
51
+
52
def test_articulation_points_cycle():
    """Two cycles glued at node 1 make that node the only cut vertex."""
    graph = nx.cycle_graph(3)
    nx.add_cycle(graph, [1, 3, 4])
    assert set(nx.articulation_points(graph)) == {1}
57
+
58
+
59
def test_is_biconnected():
    """A single cycle is biconnected; two cycles sharing one node are not."""
    graph = nx.cycle_graph(3)
    assert nx.is_biconnected(graph)
    nx.add_cycle(graph, [1, 3, 4])
    assert not nx.is_biconnected(graph)
64
+
65
+
66
def test_empty_is_biconnected():
    """Edgeless graphs and a lone edge are not biconnected."""
    graph = nx.empty_graph(5)
    assert not nx.is_biconnected(graph)
    graph.add_edge(0, 1)
    assert not nx.is_biconnected(graph)
71
+
72
+
73
def test_biconnected_components_cycle():
    """Two cycles sharing node 1 yield exactly two biconnected components."""
    graph = nx.cycle_graph(3)
    nx.add_cycle(graph, [1, 3, 4])
    expected = [{0, 1, 2}, {1, 3, 4}]
    assert_components_equal(list(nx.biconnected_components(graph)), expected)
78
+
79
+
80
def test_biconnected_components1():
    # graph example from
    # https://web.archive.org/web/20121229123447/http://www.ibluemojo.com/school/articul_algorithm.html
    edges = [
        (0, 1),
        (0, 5),
        (0, 6),
        (0, 14),
        (1, 5),
        (1, 6),
        (1, 14),
        (2, 4),
        (2, 10),
        (3, 4),
        (3, 15),
        (4, 6),
        (4, 7),
        (4, 10),
        (5, 14),
        (6, 14),
        (7, 9),
        (8, 9),
        (8, 12),
        (8, 13),
        (10, 15),
        (11, 12),
        (11, 13),
        (12, 13),
    ]
    G = nx.Graph(edges)
    pts = set(nx.articulation_points(G))
    assert pts == {4, 6, 7, 8, 9}
    comps = list(nx.biconnected_component_edges(G))
    # Expected components as edge lists; the helper compares them
    # insensitive to component order, edge order and edge direction.
    answer = [
        [(3, 4), (15, 3), (10, 15), (10, 4), (2, 10), (4, 2)],
        [(13, 12), (13, 8), (11, 13), (12, 11), (8, 12)],
        [(9, 8)],
        [(7, 9)],
        [(4, 7)],
        [(6, 4)],
        [(14, 0), (5, 1), (5, 0), (14, 5), (14, 1), (6, 14), (6, 0), (1, 6), (0, 1)],
    ]
    assert_components_edges_equal(comps, answer)
123
+
124
+
125
def test_biconnected_components2():
    # Nodes are single characters; cycles are given as strings of nodes.
    G = nx.Graph()
    nx.add_cycle(G, "ABC")
    nx.add_cycle(G, "CDE")
    nx.add_cycle(G, "FIJHG")
    nx.add_cycle(G, "GIJ")
    G.add_edge("E", "G")
    comps = list(nx.biconnected_component_edges(G))
    answer = [
        [
            tuple("GF"),
            tuple("FI"),
            tuple("IG"),
            tuple("IJ"),
            tuple("JG"),
            tuple("JH"),
            tuple("HG"),
        ],
        # The bridge E-G is a biconnected component of its own.
        [tuple("EG")],
        [tuple("CD"), tuple("DE"), tuple("CE")],
        [tuple("AB"), tuple("BC"), tuple("AC")],
    ]
    assert_components_edges_equal(comps, answer)
148
+
149
+
150
def test_biconnected_davis():
    """The Davis Southern Women graph is a single giant bicomponent."""
    D = nx.davis_southern_women_graph()
    giant = next(iter(nx.biconnected_components(D)))
    assert set(D) == giant  # All nodes in a giant bicomponent
    # A graph that is one bicomponent has no articulation points.
    assert list(nx.articulation_points(D)) == []
156
+
157
+
158
def test_biconnected_karate():
    # Karate club graph: node 0 is the only articulation point and the
    # graph splits into three biconnected components around it.
    K = nx.karate_club_graph()
    answer = [
        {
            0,
            1,
            2,
            3,
            7,
            8,
            9,
            12,
            13,
            14,
            15,
            17,
            18,
            19,
            20,
            21,
            22,
            23,
            24,
            25,
            26,
            27,
            28,
            29,
            30,
            31,
            32,
            33,
        },
        {0, 4, 5, 6, 10, 16},
        {0, 11},
    ]
    bcc = list(nx.biconnected_components(K))
    assert_components_equal(bcc, answer)
    assert set(nx.articulation_points(K)) == {0}
197
+
198
+
199
def test_biconnected_eppstein():
    # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py
    # G1 is expected to be biconnected; G2 splits into four bicomponents.
    G1 = nx.Graph(
        {
            0: [1, 2, 5],
            1: [0, 5],
            2: [0, 3, 4],
            3: [2, 4, 5, 6],
            4: [2, 3, 5, 6],
            5: [0, 1, 3, 4],
            6: [3, 4],
        }
    )
    G2 = nx.Graph(
        {
            0: [2, 5],
            1: [3, 8],
            2: [0, 3, 5],
            3: [1, 2, 6, 8],
            4: [7],
            5: [0, 2],
            6: [3, 8],
            7: [4],
            8: [1, 3, 6],
        }
    )
    assert nx.is_biconnected(G1)
    assert not nx.is_biconnected(G2)
    answer_G2 = [{1, 3, 6, 8}, {0, 2, 5}, {2, 3}, {4, 7}]
    bcc = list(nx.biconnected_components(G2))
    assert_components_equal(bcc, answer_G2)
230
+
231
+
232
def test_null_graph():
    """All biconnectivity queries on the null graph are empty or False."""
    empty = nx.Graph()
    assert not nx.is_biconnected(empty)
    # Every generator-based query yields nothing.
    for result in (
        nx.biconnected_components(empty),
        nx.biconnected_component_edges(empty),
        nx.articulation_points(empty),
    ):
        assert list(result) == []
238
+
239
+
240
def test_connected_raise():
    """Biconnectivity functions are undirected-only and reject digraphs."""
    DG = nx.DiGraph()
    for generator_fn in (
        nx.biconnected_components,
        nx.biconnected_component_edges,
        nx.articulation_points,
    ):
        with pytest.raises(NetworkXNotImplemented):
            next(generator_fn(DG))
    with pytest.raises(NetworkXNotImplemented):
        nx.is_biconnected(DG)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/tests/test_connected.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+ from networkx import convert_node_labels_to_integers as cnlti
6
+ from networkx.classes.tests import dispatch_interface
7
+
8
+
9
class TestConnected:
    """Tests for (undirected) connected-component algorithms."""

    @classmethod
    def setup_class(cls):
        # cls.G: disjoint union of a 2x2 grid, a lollipop and a house graph,
        # relabeled to consecutive integers -> three connected components.
        G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted")
        G2 = cnlti(nx.lollipop_graph(3, 3), first_label=4, ordering="sorted")
        G3 = cnlti(nx.house_graph(), first_label=10, ordering="sorted")
        cls.G = nx.union(G1, G2)
        cls.G = nx.union(cls.G, G3)
        # Directed graph used only by test_connected_raise.
        cls.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
        cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1)

        # cls.gc: (digraph, component list) pairs; the lists are not used
        # by the tests in this class.
        cls.gc = []
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 8),
                (3, 4),
                (3, 7),
                (4, 5),
                (5, 3),
                (5, 6),
                (7, 4),
                (7, 6),
                (8, 1),
                (8, 7),
            ]
        )
        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = [[2, 3, 4], [1]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = [[1, 2, 3]]
        cls.gc.append((G, C))

        # Eppstein's tests
        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
        C = [[0], [1], [2], [3], [4], [5], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
        C = [[0, 1, 2], [3, 4]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        C = []
        cls.gc.append((G, C))

    # This additionally tests the @nx._dispatchable mechanism, treating
    # nx.connected_components as if it were a re-implementation from another package
    @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert])
    def test_connected_components(self, wrapper):
        cc = nx.connected_components
        G = wrapper(self.G)
        C = {
            frozenset([0, 1, 2, 3]),
            frozenset([4, 5, 6, 7, 8, 9]),
            frozenset([10, 11, 12, 13, 14]),
        }
        assert {frozenset(g) for g in cc(G)} == C

    def test_number_connected_components(self):
        ncc = nx.number_connected_components
        assert ncc(self.G) == 3

    def test_number_connected_components2(self):
        ncc = nx.number_connected_components
        assert ncc(self.grid) == 1

    def test_connected_components2(self):
        cc = nx.connected_components
        G = self.grid
        C = {frozenset([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16])}
        assert {frozenset(g) for g in cc(G)} == C

    def test_node_connected_components(self):
        ncc = nx.node_connected_component
        G = self.grid
        C = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
        assert ncc(G, 1) == C

    def test_is_connected(self):
        assert nx.is_connected(self.grid)
        # Two isolated nodes -> not connected.
        G = nx.Graph()
        G.add_nodes_from([1, 2])
        assert not nx.is_connected(G)

    def test_connected_raise(self):
        # Undirected-only functions must reject directed graphs, and
        # connectivity of the null graph is a pointless concept.
        with pytest.raises(NetworkXNotImplemented):
            next(nx.connected_components(self.DG))
        pytest.raises(NetworkXNotImplemented, nx.number_connected_components, self.DG)
        pytest.raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1)
        pytest.raises(NetworkXNotImplemented, nx.is_connected, self.DG)
        pytest.raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph())

    def test_connected_mutability(self):
        # Mutating a yielded component must not affect later components.
        G = self.grid
        seen = set()
        for component in nx.connected_components(G):
            assert len(seen & component) == 0
            seen.update(component)
            component.clear()
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from itertools import chain
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+
7
+
8
class TestIsSemiconnected:
    """Tests for nx.is_semiconnected on directed graphs."""

    def test_undirected(self):
        # Semiconnectivity is only defined for directed graphs.
        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph())
        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph())

    def test_empty(self):
        # The null graph has no defined connectivity.
        pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph())
        pytest.raises(
            nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph()
        )

    def test_single_node_graph(self):
        G = nx.DiGraph()
        G.add_node(0)
        assert nx.is_semiconnected(G)

    def test_path(self):
        G = nx.path_graph(100, create_using=nx.DiGraph())
        assert nx.is_semiconnected(G)
        # Node 100 only points backward; 0 and 100 cannot reach each other.
        G.add_edge(100, 99)
        assert not nx.is_semiconnected(G)

    def test_cycle(self):
        G = nx.cycle_graph(100, create_using=nx.DiGraph())
        assert nx.is_semiconnected(G)
        G = nx.path_graph(100, create_using=nx.DiGraph())
        G.add_edge(0, 99)
        assert nx.is_semiconnected(G)

    def test_tree(self):
        # Directed binary tree: sibling subtrees cannot reach each other.
        G = nx.DiGraph()
        G.add_edges_from(
            chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100))
        )
        assert not nx.is_semiconnected(G)

    def test_dumbbell(self):
        # Two directed cycles; a single connecting edge makes the whole
        # graph semiconnected.
        G = nx.cycle_graph(100, create_using=nx.DiGraph())
        G.add_edges_from((i + 100, (i + 1) % 100 + 100) for i in range(100))
        assert not nx.is_semiconnected(G)  # G is disconnected.
        G.add_edge(100, 99)
        assert nx.is_semiconnected(G)

    def test_alternating_path(self):
        # Even nodes point both ways; odd nodes have no outgoing edges.
        G = nx.DiGraph(
            chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2))
        )
        assert not nx.is_semiconnected(G)
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+
6
+
7
class TestStronglyConnected:
    """Tests for strongly-connected-component (SCC) algorithms."""

    @classmethod
    def setup_class(cls):
        # cls.gc: list of (digraph, expected set of SCCs as frozensets).
        cls.gc = []
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 8),
                (3, 4),
                (3, 7),
                (4, 5),
                (5, 3),
                (5, 6),
                (7, 4),
                (7, 6),
                (8, 1),
                (8, 7),
            ]
        )
        C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])}
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = {frozenset([2, 3, 4]), frozenset([1])}
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = {frozenset([1, 2, 3])}
        cls.gc.append((G, C))

        # Eppstein's tests
        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
        C = {
            frozenset([0]),
            frozenset([1]),
            frozenset([2]),
            frozenset([3]),
            frozenset([4]),
            frozenset([5]),
            frozenset([6]),
        }
        cls.gc.append((G, C))

        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
        C = {frozenset([0, 1, 2]), frozenset([3, 4])}
        cls.gc.append((G, C))

    def test_tarjan(self):
        scc = nx.strongly_connected_components
        for G, C in self.gc:
            assert {frozenset(g) for g in scc(G)} == C

    def test_tarjan_recursive(self):
        # The recursive variant is deprecated but must still agree.
        scc = nx.strongly_connected_components_recursive
        for G, C in self.gc:
            with pytest.deprecated_call():
                assert {frozenset(g) for g in scc(G)} == C

    def test_kosaraju(self):
        scc = nx.kosaraju_strongly_connected_components
        for G, C in self.gc:
            assert {frozenset(g) for g in scc(G)} == C

    def test_number_strongly_connected_components(self):
        ncc = nx.number_strongly_connected_components
        for G, C in self.gc:
            assert ncc(G) == len(C)

    def test_is_strongly_connected(self):
        # Strongly connected iff there is exactly one SCC.
        for G, C in self.gc:
            if len(C) == 1:
                assert nx.is_strongly_connected(G)
            else:
                assert not nx.is_strongly_connected(G)

    def test_contract_scc1(self):
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 11),
                (2, 12),
                (3, 4),
                (4, 3),
                (4, 5),
                (5, 6),
                (6, 5),
                (6, 7),
                (7, 8),
                (7, 9),
                (7, 10),
                (8, 9),
                (9, 7),
                (10, 6),
                (11, 2),
                (11, 4),
                (11, 6),
                (12, 6),
                (12, 11),
            ]
        )
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        # DAG
        assert nx.is_directed_acyclic_graph(cG)
        # nodes
        assert sorted(cG.nodes()) == [0, 1, 2, 3]
        # edges
        mapping = {}
        for i, component in enumerate(scc):
            for n in component:
                mapping[n] = i
        edge = (mapping[2], mapping[3])
        assert cG.has_edge(*edge)
        edge = (mapping[2], mapping[5])
        assert cG.has_edge(*edge)
        edge = (mapping[3], mapping[5])
        assert cG.has_edge(*edge)

    def test_contract_scc_isolate(self):
        # Bug found and fixed in [1687].
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(2, 1)
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        assert list(cG.nodes()) == [0]
        assert list(cG.edges()) == []

    def test_contract_scc_edge(self):
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(2, 1)
        G.add_edge(2, 3)
        G.add_edge(3, 4)
        G.add_edge(4, 3)
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        assert sorted(cG.nodes()) == [0, 1]
        # The SCC enumeration order is not fixed, so determine the
        # expected edge direction from which component holds node 1.
        if 1 in scc[0]:
            edge = (0, 1)
        else:
            edge = (1, 0)
        assert list(cG.edges()) == [edge]

    def test_condensation_mapping_and_members(self):
        G, C = self.gc[1]
        C = sorted(C, key=len, reverse=True)
        cG = nx.condensation(G)
        mapping = cG.graph["mapping"]
        assert all(n in G for n in mapping)
        assert all(0 == cN for n, cN in mapping.items() if n in C[0])
        assert all(1 == cN for n, cN in mapping.items() if n in C[1])
        for n, d in cG.nodes(data=True):
            assert set(C[n]) == cG.nodes[n]["members"]

    def test_null_graph(self):
        G = nx.DiGraph()
        assert list(nx.strongly_connected_components(G)) == []
        assert list(nx.kosaraju_strongly_connected_components(G)) == []
        with pytest.deprecated_call():
            assert list(nx.strongly_connected_components_recursive(G)) == []
        assert len(nx.condensation(G)) == 0
        pytest.raises(
            nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()
        )

    def test_connected_raise(self):
        # All SCC functions are directed-only and reject nx.Graph.
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.strongly_connected_components(G))
        with pytest.raises(NetworkXNotImplemented):
            next(nx.kosaraju_strongly_connected_components(G))
        with pytest.raises(NetworkXNotImplemented):
            next(nx.strongly_connected_components_recursive(G))
        pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G)
        pytest.raises(NetworkXNotImplemented, nx.condensation, G)

    strong_cc_methods = (
        nx.strongly_connected_components,
        nx.kosaraju_strongly_connected_components,
    )

    @pytest.mark.parametrize("get_components", strong_cc_methods)
    def test_connected_mutability(self, get_components):
        # Mutating a yielded component must not affect later components.
        DG = nx.path_graph(5, create_using=nx.DiGraph)
        G = nx.disjoint_union(DG, DG)
        seen = set()
        for component in get_components(G):
            assert len(seen & component) == 0
            seen.update(component)
            component.clear()
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import NetworkXNotImplemented
5
+
6
+
7
class TestWeaklyConnected:
    """Tests for weakly-connected-component algorithms.

    Each test checks that the weakly connected view of a digraph agrees
    with the plain connected-component view of its undirected copy.
    """

    @classmethod
    def setup_class(cls):
        # cls.gc: (digraph, component list) pairs; the lists are unused
        # here -- the undirected copy serves as the reference instead.
        cls.gc = []
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 8),
                (3, 4),
                (3, 7),
                (4, 5),
                (5, 3),
                (5, 6),
                (7, 4),
                (7, 6),
                (8, 1),
                (8, 7),
            ]
        )
        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = [[2, 3, 4], [1]]
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = [[1, 2, 3]]
        cls.gc.append((G, C))

        # Eppstein's tests
        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
        C = [[0], [1], [2], [3], [4], [5], [6]]
        cls.gc.append((G, C))

        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
        C = [[0, 1, 2], [3, 4]]
        cls.gc.append((G, C))

    def test_weakly_connected_components(self):
        for G, C in self.gc:
            U = G.to_undirected()
            w = {frozenset(g) for g in nx.weakly_connected_components(G)}
            c = {frozenset(g) for g in nx.connected_components(U)}
            assert w == c

    def test_number_weakly_connected_components(self):
        for G, C in self.gc:
            U = G.to_undirected()
            w = nx.number_weakly_connected_components(G)
            c = nx.number_connected_components(U)
            assert w == c

    def test_is_weakly_connected(self):
        for G, C in self.gc:
            U = G.to_undirected()
            assert nx.is_weakly_connected(G) == nx.is_connected(U)

    def test_null_graph(self):
        G = nx.DiGraph()
        assert list(nx.weakly_connected_components(G)) == []
        assert nx.number_weakly_connected_components(G) == 0
        with pytest.raises(nx.NetworkXPointlessConcept):
            # is_weakly_connected returns a bool, not an iterator, so it
            # must be called directly -- the former ``next(...)`` wrapper
            # was misleading and would have raised TypeError had the call
            # ever succeeded.
            nx.is_weakly_connected(G)

    def test_connected_raise(self):
        # Weak connectivity is undefined for undirected graphs.
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.weakly_connected_components(G))
        pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G)
        pytest.raises(NetworkXNotImplemented, nx.is_weakly_connected, G)

    def test_connected_mutability(self):
        # Mutating a yielded component must not affect later components.
        DG = nx.path_graph(5, create_using=nx.DiGraph)
        G = nx.disjoint_union(DG, DG)
        seen = set()
        for component in nx.weakly_connected_components(G):
            assert len(seen & component) == 0
            seen.update(component)
            component.clear()
91
+
92
+
93
def test_is_weakly_connected_empty_graph_raises():
    """The null digraph has no defined connectivity; expect the
    pointless-concept error with its documented message."""
    empty = nx.DiGraph()
    with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"):
        nx.is_weakly_connected(empty)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/components/weakly_connected.py ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Weakly connected components."""
2
+ import networkx as nx
3
+ from networkx.utils.decorators import not_implemented_for
4
+
5
+ __all__ = [
6
+ "number_weakly_connected_components",
7
+ "weakly_connected_components",
8
+ "is_weakly_connected",
9
+ ]
10
+
11
+
12
@not_implemented_for("undirected")
@nx._dispatchable
def weakly_connected_components(G):
    """Generate weakly connected components of G.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each weakly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of weakly connected components, largest first.

    >>> G = nx.path_graph(4, create_using=nx.DiGraph())
    >>> nx.add_path(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True)]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort:

    >>> largest_cc = max(nx.weakly_connected_components(G), key=len)

    See Also
    --------
    connected_components
    strongly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    # Run an undirected BFS from every not-yet-visited node; each
    # traversal discovers exactly one weakly connected component.
    visited = set()
    for node in G:
        if node in visited:
            continue
        component = set(_plain_bfs(G, node))
        visited |= component
        yield component
63
+
64
+
65
@not_implemented_for("undirected")
@nx._dispatchable
def number_weakly_connected_components(G):
    """Returns the number of weakly connected components in G.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    n : integer
        Number of weakly connected components

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (2, 1), (3, 4)])
    >>> nx.number_weakly_connected_components(G)
    2

    See Also
    --------
    weakly_connected_components
    number_connected_components
    number_strongly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    # Count lazily without keeping any component set alive.
    count = 0
    for _ in weakly_connected_components(G):
        count += 1
    return count
103
+
104
+
105
@not_implemented_for("undirected")
@nx._dispatchable
def is_weakly_connected(G):
    """Test directed graph for weak connectivity.

    A directed graph is weakly connected if and only if the graph
    is connected when the direction of the edge between nodes is ignored.

    Note that if a graph is strongly connected (i.e. the graph is connected
    even when we account for directionality), it is by definition weakly
    connected as well.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    connected : bool
        True if the graph is weakly connected, False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (2, 1)])
    >>> G.add_node(3)
    >>> nx.is_weakly_connected(G)  # node 3 is not connected to the graph
    False
    >>> G.add_edge(2, 3)
    >>> nx.is_weakly_connected(G)
    True

    See Also
    --------
    is_strongly_connected
    is_semiconnected
    is_connected
    is_biconnected
    weakly_connected_components

    Notes
    -----
    For directed graphs only.

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "Connectivity is undefined for the null graph."
        )
    # Weakly connected iff the first component already spans every node.
    first_component = next(weakly_connected_components(G))
    return len(first_component) == len(G)
161
+
162
+
163
def _plain_bfs(G, source):
    """A fast BFS node generator.

    The direction of the edge between nodes is ignored: both successors
    and predecessors of each node are explored.

    For directed graphs only (relies on ``G._succ`` / ``G._pred``).
    """
    total = len(G)
    successors = G._succ
    predecessors = G._pred
    seen = {source}
    frontier = [source]

    yield source
    while frontier:
        next_frontier = []
        for node in frontier:
            # Explore outgoing neighbors first, then incoming, matching
            # the yield order of a direction-ignoring BFS.
            for adjacency in (successors[node], predecessors[node]):
                for nbr in adjacency:
                    if nbr not in seen:
                        seen.add(nbr)
                        next_frontier.append(nbr)
                        yield nbr
            # Stop early once every node in the graph has been reached.
            if len(seen) == total:
                return
        frontier = next_frontier
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3b17e66cdeda8edb8d1dec72626c77f1f65dd4675e3f76dc2fc4fd84aa038e30
3
+ size 18972
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ from networkx.algorithms.isomorphism.isomorph import *
2
+ from networkx.algorithms.isomorphism.vf2userfunc import *
3
+ from networkx.algorithms.isomorphism.matchhelpers import *
4
+ from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
5
+ from networkx.algorithms.isomorphism.ismags import *
6
+ from networkx.algorithms.isomorphism.tree_isomorphism import *
7
+ from networkx.algorithms.isomorphism.vf2pp import *
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py ADDED
@@ -0,0 +1,1163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ISMAGS Algorithm
3
+ ================
4
+
5
+ Provides a Python implementation of the ISMAGS algorithm. [1]_
6
+
7
+ It is capable of finding (subgraph) isomorphisms between two graphs, taking the
8
+ symmetry of the subgraph into account. In most cases the VF2 algorithm is
9
+ faster (at least on small graphs) than this implementation, but in some cases
10
+ there is an exponential number of isomorphisms that are symmetrically
11
+ equivalent. In that case, the ISMAGS algorithm will provide only one solution
12
+ per symmetry group.
13
+
14
+ >>> petersen = nx.petersen_graph()
15
+ >>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
16
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
17
+ >>> len(isomorphisms)
18
+ 120
19
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
20
+ >>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
21
+ >>> answer == isomorphisms
22
+ True
23
+
24
+ In addition, this implementation also provides an interface to find the
25
+ largest common induced subgraph [2]_ between any two graphs, again taking
26
+ symmetry into account. Given `graph` and `subgraph` the algorithm will remove
27
+ nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
28
+ `graph`. Since only the symmetry of `subgraph` is taken into account it is
29
+ worth thinking about how you provide your graphs:
30
+
31
+ >>> graph1 = nx.path_graph(4)
32
+ >>> graph2 = nx.star_graph(3)
33
+ >>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
34
+ >>> ismags.is_isomorphic()
35
+ False
36
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph())
37
+ >>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
38
+ >>> answer == largest_common_subgraph
39
+ True
40
+ >>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
41
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
42
+ >>> answer = [
43
+ ... {1: 0, 0: 1, 2: 2},
44
+ ... {1: 0, 0: 1, 3: 2},
45
+ ... {2: 0, 0: 1, 1: 2},
46
+ ... {2: 0, 0: 1, 3: 2},
47
+ ... {3: 0, 0: 1, 1: 2},
48
+ ... {3: 0, 0: 1, 2: 2},
49
+ ... ]
50
+ >>> answer == largest_common_subgraph
51
+ True
52
+
53
+ However, when not taking symmetry into account, it doesn't matter:
54
+
55
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
56
+ >>> answer = [
57
+ ... {1: 0, 0: 1, 2: 2},
58
+ ... {1: 0, 2: 1, 0: 2},
59
+ ... {2: 0, 1: 1, 3: 2},
60
+ ... {2: 0, 3: 1, 1: 2},
61
+ ... {1: 0, 0: 1, 2: 3},
62
+ ... {1: 0, 2: 1, 0: 3},
63
+ ... {2: 0, 1: 1, 3: 3},
64
+ ... {2: 0, 3: 1, 1: 3},
65
+ ... {1: 0, 0: 2, 2: 3},
66
+ ... {1: 0, 2: 2, 0: 3},
67
+ ... {2: 0, 1: 2, 3: 3},
68
+ ... {2: 0, 3: 2, 1: 3},
69
+ ... ]
70
+ >>> answer == largest_common_subgraph
71
+ True
72
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
73
+ >>> answer = [
74
+ ... {1: 0, 0: 1, 2: 2},
75
+ ... {1: 0, 0: 1, 3: 2},
76
+ ... {2: 0, 0: 1, 1: 2},
77
+ ... {2: 0, 0: 1, 3: 2},
78
+ ... {3: 0, 0: 1, 1: 2},
79
+ ... {3: 0, 0: 1, 2: 2},
80
+ ... {1: 1, 0: 2, 2: 3},
81
+ ... {1: 1, 0: 2, 3: 3},
82
+ ... {2: 1, 0: 2, 1: 3},
83
+ ... {2: 1, 0: 2, 3: 3},
84
+ ... {3: 1, 0: 2, 1: 3},
85
+ ... {3: 1, 0: 2, 2: 3},
86
+ ... ]
87
+ >>> answer == largest_common_subgraph
88
+ True
89
+
90
+ Notes
91
+ -----
92
+ - The current implementation works for undirected graphs only. The algorithm
93
+ in general should work for directed graphs as well though.
94
+ - Node keys for both provided graphs need to be fully orderable as well as
95
+ hashable.
96
+ - Node and edge equality is assumed to be transitive: if A is equal to B, and
97
+ B is equal to C, then A is equal to C.
98
+
99
+ References
100
+ ----------
101
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
102
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
103
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
104
+ Enumeration", PLoS One 9(5): e97896, 2014.
105
+ https://doi.org/10.1371/journal.pone.0097896
106
+ .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
107
+ """
108
+
109
+ __all__ = ["ISMAGS"]
110
+
111
+ import itertools
112
+ from collections import Counter, defaultdict
113
+ from functools import reduce, wraps
114
+
115
+
116
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise.

    Raises
    ------
    NotImplementedError
        If `iterable` is a multidimensional array (i.e. has a ``shape``
        attribute of rank > 1), for which elementwise ``==`` would not
        produce a plain bool.
    """
    try:
        shape = iterable.shape
    except AttributeError:
        pass
    else:
        # numpy-style arrays of rank > 1 would compare elementwise below,
        # producing arrays rather than booleans; reject them explicitly.
        if len(shape) > 1:
            # Fixed grammar in the error message ("does not works" -> "does not work").
            message = "The function does not work on multidimensional arrays."
            raise NotImplementedError(message) from None

    # Compare every remaining element against the first one. An empty
    # iterable yields True (vacuous truth), matching all()'s semantics.
    iterator = iter(iterable)
    first = next(iterator, None)
    return all(item == first for item in iterator)
144
+
145
+
146
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    groups = []
    for element in items:
        placed = False
        for group in groups:
            # Transitivity means comparing against any one representative
            # of the group is enough.
            representative = next(iter(group))
            if test(element, representative):
                group.add(element)
                placed = True
                break
        if not placed:
            groups.append({element})
    return groups
182
+
183
+
184
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
        A mapping of ``{item: partition index}``.
    """
    return {
        item: index
        for index, partition in enumerate(partitions)
        for item in partition
    }
203
+
204
+
205
def intersect(collection_of_sets):
    """
    Given a collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # The last element determines the return type (set or frozenset).
    reference = remaining.pop()
    common = set(reference)
    for other in remaining:
        common &= set(other)
    return type(reference)(common)
224
+
225
+
226
+ class ISMAGS:
227
+ """
228
+ Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
229
+ "Index-based Subgraph Matching Algorithm with General Symmetries". As the
230
+ name implies, it is symmetry aware and will only generate non-symmetric
231
+ isomorphisms.
232
+
233
+ Notes
234
+ -----
235
+ The implementation imposes additional conditions compared to the VF2
236
+ algorithm on the graphs provided and the comparison functions
237
+ (:attr:`node_equality` and :attr:`edge_equality`):
238
+
239
+ - Node keys in both graphs must be orderable as well as hashable.
240
+ - Equality must be transitive: if A is equal to B, and B is equal to C,
241
+ then A must be equal to C.
242
+
243
+ Attributes
244
+ ----------
245
+ graph: networkx.Graph
246
+ subgraph: networkx.Graph
247
+ node_equality: collections.abc.Callable
248
+ The function called to see if two nodes should be considered equal.
249
+ It's signature looks like this:
250
+ ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
251
+ `node1` is a node in `graph1`, and `node2` a node in `graph2`.
252
+ Constructed from the argument `node_match`.
253
+ edge_equality: collections.abc.Callable
254
+ The function called to see if two edges should be considered equal.
255
+ It's signature looks like this:
256
+ ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
257
+ `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
258
+ Constructed from the argument `edge_match`.
259
+
260
+ References
261
+ ----------
262
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
263
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
264
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
265
+ Enumeration", PLoS One 9(5): e97896, 2014.
266
+ https://doi.org/10.1371/journal.pone.0097896
267
+ """
268
+
269
    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
        """
        Parameters
        ----------
        graph: networkx.Graph
        subgraph: networkx.Graph
        node_match: collections.abc.Callable or None
            Function used to determine whether two nodes are equivalent. Its
            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
            `n1` and `n2` node property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
            friends.
            If `None`, all nodes are considered equal.
        edge_match: collections.abc.Callable or None
            Function used to determine whether two edges are equivalent. Its
            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
            `e1` and `e2` edge property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
            friends.
            If `None`, all edges are considered equal.
        cache: collections.abc.Mapping
            A cache used for caching graph symmetries.
        """
        # TODO: graph and subgraph setter methods that invalidate the caches.
        # TODO: allow for precomputed partitions and colors
        self.graph = graph
        self.subgraph = subgraph
        self._symmetry_cache = cache
        # Naming conventions are taken from the original paper. For your
        # sanity:
        #   sg: subgraph
        #   g: graph
        #   e: edge(s)
        #   n: node(s)
        # So: sgn means "subgraph nodes".
        # All of the attributes below are lazy caches, computed on first
        # access by the corresponding properties; None means "not yet
        # computed".
        self._sgn_partitions_ = None
        self._sge_partitions_ = None

        self._sgn_colors_ = None
        self._sge_colors_ = None

        self._gn_partitions_ = None
        self._ge_partitions_ = None

        self._gn_colors_ = None
        self._ge_colors_ = None

        self._node_compat_ = None
        self._edge_compat_ = None

        if node_match is None:
            # With no node_match everything is equivalent, so the partitions
            # and compatibility mapping are trivial and can be seeded here
            # instead of computed lazily.
            self.node_equality = self._node_match_maker(lambda n1, n2: True)
            self._sgn_partitions_ = [set(self.subgraph.nodes)]
            self._gn_partitions_ = [set(self.graph.nodes)]
            self._node_compat_ = {0: 0}
        else:
            self.node_equality = self._node_match_maker(node_match)
        if edge_match is None:
            # Same trivial seeding for edges when no edge_match is given.
            self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
            self._sge_partitions_ = [set(self.subgraph.edges)]
            self._ge_partitions_ = [set(self.graph.edges)]
            self._edge_compat_ = {0: 0}
        else:
            self.edge_equality = self._edge_match_maker(edge_match)
333
+
334
+ @property
335
+ def _sgn_partitions(self):
336
+ if self._sgn_partitions_ is None:
337
+
338
+ def nodematch(node1, node2):
339
+ return self.node_equality(self.subgraph, node1, self.subgraph, node2)
340
+
341
+ self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
342
+ return self._sgn_partitions_
343
+
344
+ @property
345
+ def _sge_partitions(self):
346
+ if self._sge_partitions_ is None:
347
+
348
+ def edgematch(edge1, edge2):
349
+ return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)
350
+
351
+ self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
352
+ return self._sge_partitions_
353
+
354
+ @property
355
+ def _gn_partitions(self):
356
+ if self._gn_partitions_ is None:
357
+
358
+ def nodematch(node1, node2):
359
+ return self.node_equality(self.graph, node1, self.graph, node2)
360
+
361
+ self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
362
+ return self._gn_partitions_
363
+
364
+ @property
365
+ def _ge_partitions(self):
366
+ if self._ge_partitions_ is None:
367
+
368
+ def edgematch(edge1, edge2):
369
+ return self.edge_equality(self.graph, edge1, self.graph, edge2)
370
+
371
+ self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
372
+ return self._ge_partitions_
373
+
374
+ @property
375
+ def _sgn_colors(self):
376
+ if self._sgn_colors_ is None:
377
+ self._sgn_colors_ = partition_to_color(self._sgn_partitions)
378
+ return self._sgn_colors_
379
+
380
+ @property
381
+ def _sge_colors(self):
382
+ if self._sge_colors_ is None:
383
+ self._sge_colors_ = partition_to_color(self._sge_partitions)
384
+ return self._sge_colors_
385
+
386
+ @property
387
+ def _gn_colors(self):
388
+ if self._gn_colors_ is None:
389
+ self._gn_colors_ = partition_to_color(self._gn_partitions)
390
+ return self._gn_colors_
391
+
392
+ @property
393
+ def _ge_colors(self):
394
+ if self._ge_colors_ is None:
395
+ self._ge_colors_ = partition_to_color(self._ge_partitions)
396
+ return self._ge_colors_
397
+
398
+ @property
399
+ def _node_compatibility(self):
400
+ if self._node_compat_ is not None:
401
+ return self._node_compat_
402
+ self._node_compat_ = {}
403
+ for sgn_part_color, gn_part_color in itertools.product(
404
+ range(len(self._sgn_partitions)), range(len(self._gn_partitions))
405
+ ):
406
+ sgn = next(iter(self._sgn_partitions[sgn_part_color]))
407
+ gn = next(iter(self._gn_partitions[gn_part_color]))
408
+ if self.node_equality(self.subgraph, sgn, self.graph, gn):
409
+ self._node_compat_[sgn_part_color] = gn_part_color
410
+ return self._node_compat_
411
+
412
+ @property
413
+ def _edge_compatibility(self):
414
+ if self._edge_compat_ is not None:
415
+ return self._edge_compat_
416
+ self._edge_compat_ = {}
417
+ for sge_part_color, ge_part_color in itertools.product(
418
+ range(len(self._sge_partitions)), range(len(self._ge_partitions))
419
+ ):
420
+ sge = next(iter(self._sge_partitions[sge_part_color]))
421
+ ge = next(iter(self._ge_partitions[ge_part_color]))
422
+ if self.edge_equality(self.subgraph, sge, self.graph, ge):
423
+ self._edge_compat_[sge_part_color] = ge_part_color
424
+ return self._edge_compat_
425
+
426
+ @staticmethod
427
+ def _node_match_maker(cmp):
428
+ @wraps(cmp)
429
+ def comparer(graph1, node1, graph2, node2):
430
+ return cmp(graph1.nodes[node1], graph2.nodes[node2])
431
+
432
+ return comparer
433
+
434
+ @staticmethod
435
+ def _edge_match_maker(cmp):
436
+ @wraps(cmp)
437
+ def comparer(graph1, edge1, graph2, edge2):
438
+ return cmp(graph1.edges[edge1], graph2.edges[edge2])
439
+
440
+ return comparer
441
+
442
    def find_isomorphisms(self, symmetry=True):
        """Find all subgraph isomorphisms between subgraph and graph

        Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            isomorphisms may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return
        elif len(self.graph) < len(self.subgraph):
            # A larger subgraph can never fit in the graph.
            return

        if symmetry:
            # Constraints order symmetric subgraph nodes so each symmetry
            # class is enumerated only once.
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()
        # Narrow the color-based candidates with a one-edge lookahead.
        la_candidates = self._get_lookahead_candidates()
        for sgn in self.subgraph:
            extra_candidates = la_candidates[sgn]
            if extra_candidates:
                candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}

        if any(candidates.values()):
            # Start from the subgraph node whose smallest candidate set is
            # smallest; collapse its candidate sets to their intersection.
            start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
            candidates[start_sgn] = (intersect(candidates[start_sgn]),)
            yield from self._map_nodes(start_sgn, candidates, constraints)
        else:
            return
489
+
490
+ @staticmethod
491
+ def _find_neighbor_color_count(graph, node, node_color, edge_color):
492
+ """
493
+ For `node` in `graph`, count the number of edges of a specific color
494
+ it has to nodes of a specific color.
495
+ """
496
+ counts = Counter()
497
+ neighbors = graph[node]
498
+ for neighbor in neighbors:
499
+ n_color = node_color[neighbor]
500
+ if (node, neighbor) in edge_color:
501
+ e_color = edge_color[node, neighbor]
502
+ else:
503
+ e_color = edge_color[neighbor, node]
504
+ counts[e_color, n_color] += 1
505
+ return counts
506
+
507
    def _get_lookahead_candidates(self):
        """
        Returns a mapping of {subgraph node: collection of graph nodes} for
        which the graph nodes are feasible candidates for the subgraph node, as
        determined by looking ahead one edge.
        """
        # For every graph node, count its edges per (edge color, node color).
        g_counts = {}
        for gn in self.graph:
            g_counts[gn] = self._find_neighbor_color_count(
                self.graph, gn, self._gn_colors, self._ge_colors
            )
        candidates = defaultdict(set)
        for sgn in self.subgraph:
            sg_count = self._find_neighbor_color_count(
                self.subgraph, sgn, self._sgn_colors, self._sge_colors
            )
            # Translate subgraph colors to the equivalent graph colors so the
            # two Counters are comparable. Colors with no compatible graph
            # color are dropped.
            new_sg_count = Counter()
            for (sge_color, sgn_color), count in sg_count.items():
                try:
                    ge_color = self._edge_compatibility[sge_color]
                    gn_color = self._node_compatibility[sgn_color]
                except KeyError:
                    pass
                else:
                    new_sg_count[ge_color, gn_color] = count

            for gn, g_count in g_counts.items():
                # gn is feasible for sgn iff it has at least as many edges of
                # every (edge color, node color) combination.
                if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
                    # Valid candidate
                    candidates[sgn].add(gn)
        return candidates
538
+
539
    def largest_common_subgraph(self, symmetry=True):
        """
        Find the largest common induced subgraphs between :attr:`subgraph` and
        :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            largest common subgraphs may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return

        if symmetry:
            # Constraints order symmetric subgraph nodes so each symmetry
            # class is enumerated only once.
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()

        if any(candidates.values()):
            # Delegate to the recursive worker, which shrinks the subgraph
            # until an isomorphism is found.
            yield from self._largest_common_subgraph(candidates, constraints)
        else:
            return
577
+
578
    def analyze_symmetry(self, graph, node_partitions, edge_colors):
        """
        Find a minimal set of permutations and corresponding co-sets that
        describe the symmetry of `graph`, given the node and edge equalities
        given by `node_partitions` and `edge_colors`, respectively.

        Parameters
        ----------
        graph : networkx.Graph
            The graph whose symmetry should be analyzed.
        node_partitions : list of sets
            A list of sets containing node keys. Node keys in the same set
            are considered equivalent. Every node key in `graph` should be in
            exactly one of the sets. If all nodes are equivalent, this should
            be ``[set(graph.nodes)]``.
        edge_colors : dict mapping edges to their colors
            A dict mapping every edge in `graph` to its corresponding color.
            Edges with the same color are considered equivalent. If all edges
            are equivalent, this should be ``{e: 0 for e in graph.edges}``.


        Returns
        -------
        set[frozenset]
            The found permutations. This is a set of frozensets of pairs of node
            keys which can be exchanged without changing :attr:`subgraph`.
        dict[collections.abc.Hashable, set[collections.abc.Hashable]]
            The found co-sets. The co-sets is a dictionary of
            ``{node key: set of node keys}``.
            Every key-value pair describes which ``values`` can be interchanged
            without changing nodes less than ``key``.
        """
        if self._symmetry_cache is not None:
            # Key the cache on everything the result depends on. NOTE: hash()
            # of a tuple is used directly as the key, so unrelated inputs
            # could in principle collide.
            key = hash(
                (
                    tuple(graph.nodes),
                    tuple(graph.edges),
                    tuple(map(tuple, node_partitions)),
                    tuple(edge_colors.items()),
                )
            )
            if key in self._symmetry_cache:
                return self._symmetry_cache[key]
        # Refine the partitions until nodes in the same partition are truly
        # interchangeable; without branching this yields exactly one result.
        node_partitions = list(
            self._refine_node_partitions(graph, node_partitions, edge_colors)
        )
        assert len(node_partitions) == 1
        node_partitions = node_partitions[0]
        permutations, cosets = self._process_ordered_pair_partitions(
            graph, node_partitions, node_partitions, edge_colors
        )
        if self._symmetry_cache is not None:
            self._symmetry_cache[key] = permutations, cosets
        return permutations, cosets
632
+
633
+ def is_isomorphic(self, symmetry=False):
634
+ """
635
+ Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
636
+ False otherwise.
637
+
638
+ Returns
639
+ -------
640
+ bool
641
+ """
642
+ return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
643
+ symmetry
644
+ )
645
+
646
+ def subgraph_is_isomorphic(self, symmetry=False):
647
+ """
648
+ Returns True if a subgraph of :attr:`graph` is isomorphic to
649
+ :attr:`subgraph` and False otherwise.
650
+
651
+ Returns
652
+ -------
653
+ bool
654
+ """
655
+ # symmetry=False, since we only need to know whether there is any
656
+ # example; figuring out all symmetry elements probably costs more time
657
+ # than it gains.
658
+ isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
659
+ return isom is not None
660
+
661
+ def isomorphisms_iter(self, symmetry=True):
662
+ """
663
+ Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
664
+ :attr:`subgraph` have the same number of nodes.
665
+ """
666
+ if len(self.graph) == len(self.subgraph):
667
+ yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
668
+
669
    def subgraph_isomorphisms_iter(self, symmetry=True):
        """Alternative name for :meth:`find_isomorphisms`."""
        # Thin alias; returns the generator produced by find_isomorphisms.
        return self.find_isomorphisms(symmetry)
672
+
673
+ def _find_nodecolor_candidates(self):
674
+ """
675
+ Per node in subgraph find all nodes in graph that have the same color.
676
+ """
677
+ candidates = defaultdict(set)
678
+ for sgn in self.subgraph.nodes:
679
+ sgn_color = self._sgn_colors[sgn]
680
+ if sgn_color in self._node_compatibility:
681
+ gn_color = self._node_compatibility[sgn_color]
682
+ candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
683
+ else:
684
+ candidates[sgn].add(frozenset())
685
+ candidates = dict(candidates)
686
+ for sgn, options in candidates.items():
687
+ candidates[sgn] = frozenset(options)
688
+ return candidates
689
+
690
+ @staticmethod
691
+ def _make_constraints(cosets):
692
+ """
693
+ Turn cosets into constraints.
694
+ """
695
+ constraints = []
696
+ for node_i, node_ts in cosets.items():
697
+ for node_t in node_ts:
698
+ if node_i != node_t:
699
+ # Node i must be smaller than node t.
700
+ constraints.append((node_i, node_t))
701
+ return constraints
702
+
703
+ @staticmethod
704
+ def _find_node_edge_color(graph, node_colors, edge_colors):
705
+ """
706
+ For every node in graph, come up with a color that combines 1) the
707
+ color of the node, and 2) the number of edges of a color to each type
708
+ of node.
709
+ """
710
+ counts = defaultdict(lambda: defaultdict(int))
711
+ for node1, node2 in graph.edges:
712
+ if (node1, node2) in edge_colors:
713
+ # FIXME directed graphs
714
+ ecolor = edge_colors[node1, node2]
715
+ else:
716
+ ecolor = edge_colors[node2, node1]
717
+ # Count per node how many edges it has of what color to nodes of
718
+ # what color
719
+ counts[node1][ecolor, node_colors[node2]] += 1
720
+ counts[node2][ecolor, node_colors[node1]] += 1
721
+
722
+ node_edge_colors = {}
723
+ for node in graph.nodes:
724
+ node_edge_colors[node] = node_colors[node], set(counts[node].items())
725
+
726
+ return node_edge_colors
727
+
728
    @staticmethod
    def _get_permutations_by_length(items):
        """
        Get all permutations of items, but only permute items with the same
        length.

        >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
        >>> answer = [
        ...     (([1], [2]), ([3, 4], [4, 5])),
        ...     (([1], [2]), ([4, 5], [3, 4])),
        ...     (([2], [1]), ([3, 4], [4, 5])),
        ...     (([2], [1]), ([4, 5], [3, 4])),
        ... ]
        >>> found == answer
        True
        """
        # Group items by length, then take the cartesian product of the
        # per-length permutations; sorting the lengths makes the output
        # order deterministic.
        by_len = defaultdict(list)
        for item in items:
            by_len[len(item)].append(item)

        yield from itertools.product(
            *(itertools.permutations(by_len[l]) for l in sorted(by_len))
        )
751
+
752
    @classmethod
    def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
        """
        Given a partition of nodes in graph, make the partitions smaller such
        that all nodes in a partition have 1) the same color, and 2) the same
        number of edges to specific other partitions.

        Yields refined partition lists. With ``branch=False`` exactly one
        refinement is produced; with ``branch=True`` every ordering of
        ambiguous same-size cells is explored, so multiple refinements may
        be yielded.
        """

        def equal_color(node1, node2):
            return node_edge_colors[node1] == node_edge_colors[node2]

        node_partitions = list(node_partitions)
        node_colors = partition_to_color(node_partitions)
        node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
        # Fixed point: every partition is already uniform, nothing to refine.
        if all(
            are_all_equal(node_edge_colors[node] for node in partition)
            for partition in node_partitions
        ):
            yield node_partitions
            return

        new_partitions = []
        output = [new_partitions]
        for partition in node_partitions:
            if not are_all_equal(node_edge_colors[node] for node in partition):
                refined = make_partitions(partition, equal_color)
                if (
                    branch
                    and len(refined) != 1
                    and len({len(r) for r in refined}) != len([len(r) for r in refined])
                ):
                    # This is where it breaks. There are multiple new cells
                    # in refined with the same length, and their order
                    # matters.
                    # So option 1) Hit it with a big hammer and simply make all
                    # orderings.
                    permutations = cls._get_permutations_by_length(refined)
                    new_output = []
                    for n_p in output:
                        for permutation in permutations:
                            new_output.append(n_p + list(permutation[0]))
                    output = new_output
                else:
                    # Unambiguous: order the new cells by size.
                    for n_p in output:
                        n_p.extend(sorted(refined, key=len))
            else:
                # Partition already uniform; keep it as-is in every branch.
                for n_p in output:
                    n_p.append(partition)
        # Recurse until every branch reaches the fixed point above.
        for n_p in output:
            yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
802
+
803
+ def _edges_of_same_color(self, sgn1, sgn2):
804
+ """
805
+ Returns all edges in :attr:`graph` that have the same colour as the
806
+ edge between sgn1 and sgn2 in :attr:`subgraph`.
807
+ """
808
+ if (sgn1, sgn2) in self._sge_colors:
809
+ # FIXME directed graphs
810
+ sge_color = self._sge_colors[sgn1, sgn2]
811
+ else:
812
+ sge_color = self._sge_colors[sgn2, sgn1]
813
+ if sge_color in self._edge_compatibility:
814
+ ge_color = self._edge_compatibility[sge_color]
815
+ g_edges = self._ge_partitions[ge_color]
816
+ else:
817
+ g_edges = []
818
+ return g_edges
819
+
820
    def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
        """
        Find all subgraph isomorphisms honoring constraints.

        Recursively extends `mapping` (subgraph node -> graph node) by trying
        every feasible graph node for `sgn`, and yields the inverted mapping
        {graph node: subgraph node} whenever all of `to_be_mapped` is covered.
        """
        if mapping is None:
            mapping = {}
        else:
            mapping = mapping.copy()
        if to_be_mapped is None:
            to_be_mapped = set(self.subgraph.nodes)

        # Note, we modify candidates here. Doesn't seem to affect results, but
        # remember this.
        # candidates = candidates.copy()
        sgn_candidates = intersect(candidates[sgn])
        candidates[sgn] = frozenset([sgn_candidates])
        for gn in sgn_candidates:
            # We're going to try to map sgn to gn.
            if gn in mapping.values() or sgn not in to_be_mapped:
                # gn is already mapped to something
                continue  # pragma: no cover

            # REDUCTION and COMBINATION
            mapping[sgn] = gn
            # BASECASE
            if to_be_mapped == set(mapping.keys()):
                # Complete mapping found; yield it inverted.
                yield {v: k for k, v in mapping.items()}
                continue
            left_to_map = to_be_mapped - set(mapping.keys())

            new_candidates = candidates.copy()
            sgn_nbrs = set(self.subgraph[sgn])
            not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
            for sgn2 in left_to_map:
                if sgn2 not in sgn_nbrs:
                    # sgn2 is not adjacent to sgn, so its image must not be
                    # adjacent to gn (induced subgraph semantics).
                    gn2_options = not_gn_nbrs
                else:
                    # Get all edges to gn of the right color:
                    g_edges = self._edges_of_same_color(sgn, sgn2)
                    # FIXME directed graphs
                    # And all nodes involved in those which are connected to gn
                    gn2_options = {n for e in g_edges for n in e if gn in e}
                    # Node color compatibility should be taken care of by the
                    # initial candidate lists made by find_subgraphs

                # Add gn2_options to the right collection. Since new_candidates
                # is a dict of frozensets of frozensets of node indices it's
                # a bit clunky. We can't do .add, and + also doesn't work. We
                # could do |, but I deem union to be clearer.
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

                # Symmetry-breaking constraints: force an ordering between the
                # images of constrained subgraph node pairs.
                if (sgn, sgn2) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
                elif (sgn2, sgn) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
                else:
                    continue  # pragma: no cover
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

            # The next node is the one that is unmapped and has fewest
            # candidates
            next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
            yield from self._map_nodes(
                next_sgn,
                new_candidates,
                constraints,
                mapping=mapping,
                to_be_mapped=to_be_mapped,
            )
            # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
            # when making a new mapping for sgn.
            # del mapping[sgn]
896
+
897
    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
        """
        Find all largest common subgraphs honoring constraints.

        Recursively yields graph-node -> subgraph-node mappings for the
        largest common subgraphs between ``self.subgraph`` and ``self.graph``.

        Parameters
        ----------
        candidates : dict
            Per subgraph-node collection of candidate graph-node sets.
        constraints : iterable
            Symmetry constraints as (low, high) subgraph-node pairs.
        to_be_mapped : set of frozensets, optional
            The alternative subgraph node sets still to be mapped. Defaults
            to the full subgraph node set on the initial call.
        """
        if to_be_mapped is None:
            to_be_mapped = {frozenset(self.subgraph.nodes)}

        # The LCS problem is basically a repeated subgraph isomorphism problem
        # with smaller and smaller subgraphs. We store the nodes that are
        # "part of" the subgraph in to_be_mapped, and we make it a little
        # smaller every iteration.

        # Every frozenset in to_be_mapped has the same size (each recursion
        # level removes exactly one node from each), so any member tells us
        # the size of the subgraph currently being attempted.
        current_size = len(next(iter(to_be_mapped), []))

        found_iso = False
        if current_size <= len(self.graph):
            # There's no point in trying to find isomorphisms of
            # graph >= subgraph if subgraph has more nodes than graph.

            # Try the isomorphism first with the nodes with lowest ID. So sort
            # them. Those are more likely to be part of the final
            # correspondence. This makes finding the first answer(s) faster. In
            # theory.
            for nodes in sorted(to_be_mapped, key=sorted):
                # Find the isomorphism between subgraph[to_be_mapped] <= graph
                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
                isomorphs = self._map_nodes(
                    next_sgn, candidates, constraints, to_be_mapped=nodes
                )

                # This is effectively `yield from isomorphs`, except that we look
                # whether an item was yielded.
                try:
                    item = next(isomorphs)
                except StopIteration:
                    pass
                else:
                    yield item
                    yield from isomorphs
                    found_iso = True

        # BASECASE
        if found_iso or current_size == 1:
            # Shrinking has no point because either 1) we end up with a smaller
            # common subgraph (and we want the largest), or 2) there'll be no
            # more subgraph.
            return

        left_to_be_mapped = set()
        for nodes in to_be_mapped:
            for sgn in nodes:
                # We're going to remove sgn from to_be_mapped, but subject to
                # symmetry constraints. We know that for every constraint we
                # have those subgraph nodes are equal. So whenever we would
                # remove the lower part of a constraint, remove the higher
                # instead. This is all dealth with by _remove_node. And because
                # left_to_be_mapped is a set, we don't do double work.

                # And finally, make the subgraph one node smaller.
                # REDUCTION
                new_nodes = self._remove_node(sgn, nodes, constraints)
                left_to_be_mapped.add(new_nodes)
        # COMBINATION
        yield from self._largest_common_subgraph(
            candidates, constraints, to_be_mapped=left_to_be_mapped
        )
963
+
964
+ @staticmethod
965
+ def _remove_node(node, nodes, constraints):
966
+ """
967
+ Returns a new set where node has been removed from nodes, subject to
968
+ symmetry constraints. We know, that for every constraint we have
969
+ those subgraph nodes are equal. So whenever we would remove the
970
+ lower part of a constraint, remove the higher instead.
971
+ """
972
+ while True:
973
+ for low, high in constraints:
974
+ if low == node and high in nodes:
975
+ node = high
976
+ break
977
+ else: # no break, couldn't find node in constraints
978
+ break
979
+ return frozenset(nodes - {node})
980
+
981
+ @staticmethod
982
+ def _find_permutations(top_partitions, bottom_partitions):
983
+ """
984
+ Return the pairs of top/bottom partitions where the partitions are
985
+ different. Ensures that all partitions in both top and bottom
986
+ partitions have size 1.
987
+ """
988
+ # Find permutations
989
+ permutations = set()
990
+ for top, bot in zip(top_partitions, bottom_partitions):
991
+ # top and bot have only one element
992
+ if len(top) != 1 or len(bot) != 1:
993
+ raise IndexError(
994
+ "Not all nodes are coupled. This is"
995
+ f" impossible: {top_partitions}, {bottom_partitions}"
996
+ )
997
+ if top != bot:
998
+ permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
999
+ return permutations
1000
+
1001
    @staticmethod
    def _update_orbits(orbits, permutations):
        """
        Update orbits based on permutations. Orbits is modified in place.
        For every pair of items in permutations their respective orbits are
        merged.
        """
        for permutation in permutations:
            # Each permutation is a frozenset of exactly two nodes.
            node, node2 = permutation
            # Find the orbits that contain node and node2, and replace the
            # orbit containing node with the union
            first = second = None
            for idx, orbit in enumerate(orbits):
                if first is not None and second is not None:
                    break
                if node in orbit:
                    first = idx
                if node2 in orbit:
                    second = idx
            if first != second:
                # The merge happens before the delete, so any index shift
                # caused by `del` is harmless — neither index is used after.
                orbits[first].update(orbits[second])
                del orbits[second]
1023
+
1024
    def _couple_nodes(
        self,
        top_partitions,
        bottom_partitions,
        pair_idx,
        t_node,
        b_node,
        graph,
        edge_colors,
    ):
        """
        Generate new partitions from top and bottom_partitions where t_node is
        coupled to b_node. pair_idx is the index of the partitions where t_ and
        b_node can be found.
        """
        t_partition = top_partitions[pair_idx]
        b_partition = bottom_partitions[pair_idx]
        assert t_node in t_partition and b_node in b_partition
        # Couple node to node2. This means they get their own partition
        new_top_partitions = [top.copy() for top in top_partitions]
        new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
        # Split the chosen cell into the singleton and the remainder.
        new_t_groups = {t_node}, t_partition - {t_node}
        new_b_groups = {b_node}, b_partition - {b_node}
        # Replace the old partitions with the coupled ones
        del new_top_partitions[pair_idx]
        del new_bottom_partitions[pair_idx]
        new_top_partitions[pair_idx:pair_idx] = new_t_groups
        new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

        # Re-refine both sides. With branch=True the bottom refinement may
        # produce several alternative partitionings (hence the loop below);
        # the top refinement is expected to be unique — see the assert.
        new_top_partitions = self._refine_node_partitions(
            graph, new_top_partitions, edge_colors
        )
        new_bottom_partitions = self._refine_node_partitions(
            graph, new_bottom_partitions, edge_colors, branch=True
        )
        new_top_partitions = list(new_top_partitions)
        assert len(new_top_partitions) == 1
        new_top_partitions = new_top_partitions[0]
        for bot in new_bottom_partitions:
            # Pair a fresh copy of the (unique) top partitioning with each
            # bottom alternative.
            yield list(new_top_partitions), bot
1064
+
1065
    def _process_ordered_pair_partitions(
        self,
        graph,
        top_partitions,
        bottom_partitions,
        edge_colors,
        orbits=None,
        cosets=None,
    ):
        """
        Processes ordered pair partitions as per the reference paper. Finds and
        returns all permutations and cosets that leave the graph unchanged.
        """
        if orbits is None:
            orbits = [{node} for node in graph.nodes]
        else:
            # Note that we don't copy orbits when we are given one. This means
            # we leak information between the recursive branches. This is
            # intentional!
            orbits = orbits
        if cosets is None:
            cosets = {}
        else:
            # Cosets, by contrast, are branch-local: copy them.
            cosets = cosets.copy()

        # Paired cells must have equal sizes — an invariant of the coupling.
        assert all(
            len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
        )

        # BASECASE
        if all(len(top) == 1 for top in top_partitions):
            # All nodes are mapped
            permutations = self._find_permutations(top_partitions, bottom_partitions)
            self._update_orbits(orbits, permutations)
            if permutations:
                return [permutations], cosets
            else:
                return [], cosets

        permutations = []
        # Nodes that still sit in a multi-element (i.e. not-yet-coupled) cell,
        # tagged with their cell index.
        unmapped_nodes = {
            (node, idx)
            for idx, t_partition in enumerate(top_partitions)
            for node in t_partition
            if len(t_partition) > 1
        }
        node, pair_idx = min(unmapped_nodes)
        b_partition = bottom_partitions[pair_idx]

        for node2 in sorted(b_partition):
            if len(b_partition) == 1:
                # Can never result in symmetry
                continue
            if node != node2 and any(
                node in orbit and node2 in orbit for orbit in orbits
            ):
                # Orbit prune branch
                continue
            # REDUCTION
            # Couple node to node2
            partitions = self._couple_nodes(
                top_partitions,
                bottom_partitions,
                pair_idx,
                node,
                node2,
                graph,
                edge_colors,
            )
            for opp in partitions:
                new_top_partitions, new_bottom_partitions = opp

                new_perms, new_cosets = self._process_ordered_pair_partitions(
                    graph,
                    new_top_partitions,
                    new_bottom_partitions,
                    edge_colors,
                    orbits,
                    cosets,
                )
                # COMBINATION
                permutations += new_perms
                cosets.update(new_cosets)

        # Singleton cells mapped to themselves are the fixed points so far.
        mapped = {
            k
            for top, bottom in zip(top_partitions, bottom_partitions)
            for k in top
            if len(top) == 1 and top == bottom
        }
        ks = {k for k in graph.nodes if k < node}
        # Have all nodes with ID < node been mapped?
        find_coset = ks <= mapped and node not in cosets
        if find_coset:
            # Find the orbit that contains node
            for orbit in orbits:
                if node in orbit:
                    cosets[node] = orbit.copy()
        return permutations, cosets
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Graph isomorphism functions.
3
+ """
4
+ import networkx as nx
5
+ from networkx.exception import NetworkXError
6
+
7
+ __all__ = [
8
+ "could_be_isomorphic",
9
+ "fast_could_be_isomorphic",
10
+ "faster_could_be_isomorphic",
11
+ "is_isomorphic",
12
+ ]
13
+
14
+
15
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    The triangle sequence contains the number of triangles each node is part of.
    The clique sequence contains for each node the number of maximal cliques
    involving that node.

    """

    # Check global properties
    if G1.order() != G2.order():
        return False

    def _node_profiles(G):
        """Sorted multiset of per-node [degree, triangles, cliques] profiles."""
        triangles = nx.triangles(G)
        # Count maximal-clique membership with a single pass over the
        # cliques, instead of scanning every clique once per node (the
        # previous form was accidentally quadratic in practice).
        clique_count = {n: 0 for n in G}
        for clique in nx.find_cliques(G):
            for n in clique:
                clique_count[n] += 1
        return sorted([d, triangles[v], clique_count[v]] for v, d in G.degree())

    # Check local properties: the multisets of node invariants must agree
    # for the graphs to possibly be isomorphic.
    if _node_profiles(G1) != _node_profiles(G2):
        return False

    # OK...
    return True
58
+
59
+
60
# Backwards-compatible alias for the historical function name.
graph_could_be_isomorphic = could_be_isomorphic
61
+
62
+
63
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree and triangle sequences. The triangle
    sequence contains the number of triangles each node is part of.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    def _profile(G):
        # Sorted multiset of per-node [degree, triangle count] pairs.
        triangles = nx.triangles(G)
        return sorted([deg, triangles[node]] for node, deg in G.degree())

    # Identical profiles are necessary (but not sufficient) for isomorphism.
    return _profile(G1) == _profile(G2)
99
+
100
+
101
# Backwards-compatible alias for the historical function name.
fast_graph_could_be_isomorphic = fast_could_be_isomorphic
102
+
103
+
104
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree sequences.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    # Equal sorted degree sequences are necessary (but not sufficient).
    degrees1 = sorted(deg for _, deg in G1.degree())
    degrees2 = sorted(deg for _, deg in G2.degree())
    return degrees1 == degrees2
132
+
133
+
134
# Backwards-compatible alias for the historical function name.
faster_graph_could_be_isomorphic = faster_could_be_isomorphic
135
+
136
+
137
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test. If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop on Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Directedness must agree; mixing a directed with an undirected graph is
    # an error rather than a negative answer.
    if G1.is_directed() != G2.is_directed():
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    # Select the matcher implementation that fits the (shared) graph kind.
    if G1.is_directed():
        matcher_cls = nx.algorithms.isomorphism.DiGraphMatcher
    else:
        matcher_cls = nx.algorithms.isomorphism.GraphMatcher

    matcher = matcher_cls(G1, G2, node_match=node_match, edge_match=edge_match)
    return matcher.is_isomorphic()
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py ADDED
@@ -0,0 +1,1065 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *************
3
+ VF2 Algorithm
4
+ *************
5
+
6
+ An implementation of VF2 algorithm for graph isomorphism testing.
7
+
8
+ The simplest interface to use this module is to call the
9
+ :func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
10
+ function.
11
+
12
+ Introduction
13
+ ------------
14
+
15
+ The GraphMatcher and DiGraphMatcher are responsible for matching
16
+ graphs or directed graphs in a predetermined manner. This
17
+ usually means a check for an isomorphism, though other checks
18
+ are also possible. For example, a subgraph of one graph
19
+ can be checked for isomorphism to a second graph.
20
+
21
+ Matching is done via syntactic feasibility. It is also possible
22
+ to check for semantic feasibility. Feasibility, then, is defined
23
+ as the logical AND of the two functions.
24
+
25
+ To include a semantic check, the (Di)GraphMatcher class should be
26
+ subclassed, and the
27
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
28
+ function should be redefined. By default, the semantic feasibility function always
29
+ returns ``True``. The effect of this is that semantics are not
30
+ considered in the matching of G1 and G2.
31
+
32
+ Examples
33
+ --------
34
+
35
+ Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
36
+
37
+ >>> from networkx.algorithms import isomorphism
38
+ >>> G1 = nx.path_graph(4)
39
+ >>> G2 = nx.path_graph(4)
40
+ >>> GM = isomorphism.GraphMatcher(G1, G2)
41
+ >>> GM.is_isomorphic()
42
+ True
43
+
44
+ GM.mapping stores the isomorphism mapping from G1 to G2.
45
+
46
+ >>> GM.mapping
47
+ {0: 0, 1: 1, 2: 2, 3: 3}
48
+
49
+
50
+ Suppose G1 and G2 are isomorphic directed graphs.
51
+ Verification is as follows:
52
+
53
+ >>> G1 = nx.path_graph(4, create_using=nx.DiGraph())
54
+ >>> G2 = nx.path_graph(4, create_using=nx.DiGraph())
55
+ >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
56
+ >>> DiGM.is_isomorphic()
57
+ True
58
+
59
+ DiGM.mapping stores the isomorphism mapping from G1 to G2.
60
+
61
+ >>> DiGM.mapping
62
+ {0: 0, 1: 1, 2: 2, 3: 3}
63
+
64
+
65
+
66
+ Subgraph Isomorphism
67
+ --------------------
68
+ Graph theory literature can be ambiguous about the meaning of the
69
+ above statement, and we seek to clarify it now.
70
+
71
+ In the VF2 literature, a mapping `M` is said to be a graph-subgraph
72
+ isomorphism iff `M` is an isomorphism between `G2` and a subgraph of `G1`.
73
+ Thus, to say that `G1` and `G2` are graph-subgraph isomorphic is to say
74
+ that a subgraph of `G1` is isomorphic to `G2`.
75
+
76
+ Other literature uses the phrase 'subgraph isomorphic' as in '`G1` does
77
+ not have a subgraph isomorphic to `G2`'. Another use is as an in adverb
78
+ for isomorphic. Thus, to say that `G1` and `G2` are subgraph isomorphic
79
+ is to say that a subgraph of `G1` is isomorphic to `G2`.
80
+
81
+ Finally, the term 'subgraph' can have multiple meanings. In this
82
+ context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
83
+ subgraph isomorphisms are not directly supported, but one should be
84
+ able to perform the check by making use of
85
+ :func:`line_graph <networkx.generators.line.line_graph>`. For
86
+ subgraphs which are not induced, the term 'monomorphism' is preferred
87
+ over 'isomorphism'.
88
+
89
+ Let ``G = (N, E)`` be a graph with a set of nodes `N` and set of edges `E`.
90
+
91
+ If ``G' = (N', E')`` is a subgraph, then:
92
+ `N'` is a subset of `N` and
93
+ `E'` is a subset of `E`.
94
+
95
+ If ``G' = (N', E')`` is a node-induced subgraph, then:
96
+ `N'` is a subset of `N` and
97
+ `E'` is the subset of edges in `E` relating nodes in `N'`.
98
+
99
+ If `G' = (N', E')` is an edge-induced subgraph, then:
100
+ `N'` is the subset of nodes in `N` related by edges in `E'` and
101
+ `E'` is a subset of `E`.
102
+
103
+ If `G' = (N', E')` is a monomorphism, then:
104
+ `N'` is a subset of `N` and
105
+ `E'` is a subset of the set of edges in `E` relating nodes in `N'`.
106
+
107
+ Note that if `G'` is a node-induced subgraph of `G`, then it is always a
108
+ subgraph monomorphism of `G`, but the opposite is not always true, as a
109
+ monomorphism can have fewer edges.
110
+
111
+ References
112
+ ----------
113
+ [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
114
+ "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
115
+ IEEE Transactions on Pattern Analysis and Machine Intelligence,
116
+ vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
117
+ http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
118
+
119
+ [2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
120
+ Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
121
+ on Graph-based Representations in Pattern Recognition, Cuen,
122
+ pp. 149-159, 2001.
123
+ https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
124
+
125
+ See Also
126
+ --------
127
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
128
+ :meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
129
+
130
+ Notes
131
+ -----
132
+
133
+ The implementation handles both directed and undirected graphs as well
134
+ as multigraphs.
135
+
136
+ In general, the subgraph isomorphism problem is NP-complete whereas the
137
+ graph isomorphism problem is most likely not NP-complete (although no
138
+ polynomial-time algorithm is known to exist).
139
+
140
+ """
141
+
142
+ # This work was originally coded by Christopher Ellison
143
+ # as part of the Computational Mechanics Python (CMPy) project.
144
+ # James P. Crutchfield, principal investigator.
145
+ # Complexity Sciences Center and Physics Department, UC Davis.
146
+
147
+ import sys
148
+
149
+ __all__ = ["GraphMatcher", "DiGraphMatcher"]
150
+
151
+
152
+ class GraphMatcher:
153
+ """Implementation of VF2 algorithm for matching undirected graphs.
154
+
155
+ Suitable for Graph and MultiGraph instances.
156
+ """
157
+
158
    def __init__(self, G1, G2):
        """Initialize GraphMatcher.

        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = isomorphism.GraphMatcher(G1, G2)
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())
        # Stable index of each G2 node in iteration order; used as the
        # tie-breaking key when choosing candidate nodes in
        # candidate_pairs_iter().
        self.G2_node_order = {n: i for i, n in enumerate(G2)}

        # Set recursion limit. The recursive match() can nest roughly once
        # per node of G2, so raise the interpreter limit if it is too low.
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))

        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = "graph"

        # Initialize state
        self.initialize()
193
+
194
    def reset_recursion_limit(self):
        """Restores the recursion limit.

        Call this manually when done matching to undo the limit raised in
        ``__init__``.
        """
        # TODO:
        # Currently, we use recursion and set the recursion level higher.
        # It would be nice to restore the level, but because the
        # (Di)GraphMatcher classes make use of cyclic references, garbage
        # collection will never happen when we define __del__() to
        # restore the recursion level. The result is a memory leak.
        # So for now, we do not automatically restore the recursion level,
        # and instead provide a method to do this manually. Eventually,
        # we should turn this into a non-recursive implementation.
        sys.setrecursionlimit(self.old_recursion_limit)
206
+
207
    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2.

        Yields (G1_node, G2_node) pairs worth attempting next, based on the
        current partial mapping (``self.core_1``/``self.core_2``) and the
        inout-terminal sets (``self.inout_1``/``self.inout_2``).
        """

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        # Deterministic tie-breaker: prefer the G2 node first seen in
        # iteration order.
        min_key = self.G2_node_order.__getitem__

        # First we compute the inout-terminal sets.
        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
        T2_inout = [node for node in self.inout_2 if node not in self.core_2]

        # If T1_inout and T2_inout are both nonempty.
        # P(s) = T1_inout x {min T2_inout}
        if T1_inout and T2_inout:
            node_2 = min(T2_inout, key=min_key)
            for node_1 in T1_inout:
                yield node_1, node_2

        else:
            # If T1_inout and T2_inout were both empty....
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            # if not (T1_inout or T2_inout):  # as suggested by [2], incorrect
            if 1:  # as inferred from [1], correct
                # First we determine the candidate node for G2
                other_node = min(G2_nodes - set(self.core_2), key=min_key)
                for node in self.G1:
                    if node not in self.core_1:
                        yield node, other_node

        # For all other cases, we don't have any candidate pairs.
239
+
240
    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than GMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.

        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout}
        # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout}
        #
        # The value stored is the depth of the SSR tree when the node became
        # part of the corresponding set.
        self.inout_1 = {}
        self.inout_2 = {}
        # Practically, these sets simply store the nodes in the subgraph.

        # GMState is defined elsewhere in this module; it snapshots and
        # mutates the search state for one candidate-pair extension.
        self.state = GMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()
270
+
271
+ def is_isomorphic(self):
272
+ """Returns True if G1 and G2 are isomorphic graphs."""
273
+
274
+ # Let's do two very quick checks!
275
+ # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
276
+ # For now, I just copy the code.
277
+
278
+ # Check global properties
279
+ if self.G1.order() != self.G2.order():
280
+ return False
281
+
282
+ # Check local properties
283
+ d1 = sorted(d for n, d in self.G1.degree())
284
+ d2 = sorted(d for n, d in self.G2.degree())
285
+ if d1 != d2:
286
+ return False
287
+
288
+ try:
289
+ x = next(self.isomorphisms_iter())
290
+ return True
291
+ except StopIteration:
292
+ return False
293
+
294
    def isomorphisms_iter(self):
        """Generator over isomorphisms between G1 and G2.

        Each yielded value is a dict mapping G1 nodes to G2 nodes. State is
        reset lazily on first iteration (this is a generator function).
        """
        # Declare that we are looking for a graph-graph isomorphism.
        self.test = "graph"
        # Start every search from a clean slate.
        self.initialize()
        yield from self.match()
300
+
301
    def match(self):
        """Extends the isomorphism mapping.

        This function is called recursively to determine if a complete
        isomorphism can be found between G1 and G2. It cleans up the class
        variables after each recursive call. If an isomorphism is found,
        we yield the mapping.

        """
        if len(self.core_1) == len(self.G2):
            # Save the final mapping, otherwise garbage collection deletes it.
            self.mapping = self.core_1.copy()
            # The mapping is complete.
            yield self.mapping
        else:
            for G1_node, G2_node in self.candidate_pairs_iter():
                if self.syntactic_feasibility(G1_node, G2_node):
                    if self.semantic_feasibility(G1_node, G2_node):
                        # Recursive call, adding the feasible state.
                        # NOTE(review): constructing the state object appears
                        # to record the pair into core_*/inout_* in place
                        # (see GMState); restore() below undoes that.
                        newstate = self.state.__class__(self, G1_node, G2_node)
                        yield from self.match()

                        # restore data structures
                        newstate.restore()
325
+
326
    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically feasible.

        The semantic feasibility function should return True if it is
        acceptable to add the candidate pair (G1_node, G2_node) to the current
        partial isomorphism mapping. The logic should focus on semantic
        information contained in the edge data or a formalized node class.

        By acceptable, we mean that the subsequent mapping can still become a
        complete isomorphism mapping. Thus, if adding the candidate pair
        definitely makes it so that the subsequent mapping cannot become a
        complete isomorphism mapping, then this function must return False.

        The default semantic feasibility function always returns True. The
        effect is that semantics are not considered in the matching of G1
        and G2.

        The semantic checks might differ based on the what type of test is
        being performed. A keyword description of the test is stored in
        self.test. Here is a quick description of the currently implemented
        tests::

          test='graph'
            Indicates that the graph matcher is looking for a graph-graph
            isomorphism.

          test='subgraph'
            Indicates that the graph matcher is looking for a subgraph-graph
            isomorphism such that a subgraph of G1 is isomorphic to G2.

          test='mono'
            Indicates that the graph matcher is looking for a subgraph-graph
            monomorphism such that a subgraph of G1 is monomorphic to G2.

        Any subclass which redefines semantic_feasibility() must maintain
        the above form to keep the match() method functional. Implementations
        should consider multigraphs.
        """
        # Default: semantics are ignored; subclasses override this hook.
        return True
365
+
366
def subgraph_is_isomorphic(self):
    """Returns True if a subgraph of G1 is isomorphic to G2."""
    # next() with a default replaces the original unused binding and
    # try/except StopIteration dance.
    return next(self.subgraph_isomorphisms_iter(), None) is not None
373
+
374
def subgraph_is_monomorphic(self):
    """Returns True if a subgraph of G1 is monomorphic to G2."""
    # next() with a default replaces the original unused binding and
    # try/except StopIteration dance.
    return next(self.subgraph_monomorphisms_iter(), None) is not None
381
+
382
+ # subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
383
+
384
def subgraph_isomorphisms_iter(self):
    """Generator over isomorphisms between a subgraph of G1 and G2."""
    # Run the search in graph-subgraph isomorphism mode.
    self.test = "subgraph"
    self.initialize()
    for mapping in self.match():
        yield mapping
390
+
391
def subgraph_monomorphisms_iter(self):
    """Generator over monomorphisms between a subgraph of G1 and G2."""
    # Run the search in graph-subgraph monomorphism mode.
    self.test = "mono"
    self.initialize()
    for mapping in self.match():
        yield mapping
397
+
398
+ # subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
399
+
400
def syntactic_feasibility(self, G1_node, G2_node):
    """Returns True if adding (G1_node, G2_node) is syntactically feasible.

    The pair is feasible when mapping G1_node to G2_node cannot be ruled
    out by the VF2 cutting rules (R_self, R_neighbor, R_terminout, R_new).
    Multigraphs are supported by comparing edge multiplicities via
    number_of_edges() rather than mere adjacency, so Graph and MultiGraph
    instances can be mixed.

    Parameters
    ----------
    G1_node, G2_node : node
        The candidate pair to test against the current partial mapping.

    Returns
    -------
    bool
        False as soon as any cutting rule excludes the pair; True
        otherwise.
    """
    # Look ahead 0 -- R_self: selfloop multiplicities must match (for
    # monomorphism, G1 only needs at least as many selfloops as G2).
    # Without this check we would fail on R_neighbor one level deeper,
    # but pruning here shrinks the search tree.
    if self.test == "mono":
        if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
            G2_node, G2_node
        ):
            return False
    else:
        if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
            G2_node, G2_node
        ):
            return False

    # R_neighbor: every already-mapped neighbor must correspond across
    # the mapping, with equal (or, for monomorphism, sufficient) edge
    # multiplicities.
    if self.test != "mono":
        for neighbor in self.G1[G1_node]:
            if neighbor in self.core_1:
                if self.core_1[neighbor] not in self.G2[G2_node]:
                    return False
                elif self.G1.number_of_edges(
                    neighbor, G1_node
                ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
                    return False

    for neighbor in self.G2[G2_node]:
        if neighbor in self.core_2:
            if self.core_2[neighbor] not in self.G1[G1_node]:
                return False
            elif self.test == "mono":
                if self.G1.number_of_edges(
                    self.core_2[neighbor], G1_node
                ) < self.G2.number_of_edges(neighbor, G2_node):
                    return False
            else:
                if self.G1.number_of_edges(
                    self.core_2[neighbor], G1_node
                ) != self.G2.number_of_edges(neighbor, G2_node):
                    return False

    if self.test != "mono":
        # Look ahead 1 -- R_terminout: the number of unmapped "frontier"
        # neighbors (in T^{inout}) must agree; for subgraph search G1
        # only needs at least as many as G2.
        num1 = sum(
            1
            for neighbor in self.G1[G1_node]
            if neighbor in self.inout_1 and neighbor not in self.core_1
        )
        num2 = sum(
            1
            for neighbor in self.G2[G2_node]
            if neighbor in self.inout_2 and neighbor not in self.core_2
        )
        if self.test == "graph":
            if num1 != num2:
                return False
        else:  # self.test == 'subgraph'
            if num1 < num2:
                return False

        # Look ahead 2 -- R_new: the number of neighbors in neither the
        # core mapping nor T^{inout} must agree the same way.
        num1 = sum(
            1 for neighbor in self.G1[G1_node] if neighbor not in self.inout_1
        )
        num2 = sum(
            1 for neighbor in self.G2[G2_node] if neighbor not in self.inout_2
        )
        if self.test == "graph":
            if num1 != num2:
                return False
        else:  # self.test == 'subgraph'
            if num1 < num2:
                return False

    # Otherwise, this node pair is syntactically feasible!
    return True
522
+
523
+
524
class DiGraphMatcher(GraphMatcher):
    """Implementation of VF2 algorithm for matching directed graphs.

    Suitable for DiGraph and MultiDiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize DiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
        (The original docstring wrongly named the undirected classes.)

        Examples
        --------
        To create a DiGraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
        """
        super().__init__(G1, G2)

    def _counts_feasible(self, num1, num2):
        """Apply the counting cut rule for the current (non-mono) test mode.

        Graph-graph isomorphism demands equal counts; subgraph search only
        requires G1 to offer at least as many nodes as G2.
        """
        if self.test == "graph":
            return num1 == num2
        return num1 >= num2  # self.test == 'subgraph'

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""
        # All computations are done using the current search state.
        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the out-terminal sets.
        T1_out = [node for node in self.out_1 if node not in self.core_1]
        T2_out = [node for node in self.out_2 if node not in self.core_2]

        # If T1_out and T2_out are both nonempty:
        # P(s) = T1_out x {min T2_out}
        if T1_out and T2_out:
            node_2 = min(T2_out, key=min_key)
            for node_1 in T1_out:
                yield node_1, node_2
        # 'elif not (T1_out or T2_out)', as suggested by [2], is incorrect;
        # a plain else (as suggested by [1]) is correct.
        else:
            # Compute the in-terminal sets.
            T1_in = [node for node in self.in_1 if node not in self.core_1]
            T2_in = [node for node in self.in_2 if node not in self.core_2]

            # If T1_in and T2_in are both nonempty:
            # P(s) = T1_in x {min T2_in}
            if T1_in and T2_in:
                node_2 = min(T2_in, key=min_key)
                for node_1 in T1_in:
                    yield node_1, node_2
            # Again a plain else, as inferred from [1], is correct.
            else:
                # All terminal sets are empty:
                # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
                for node_1 in G1_nodes:
                    if node_1 not in self.core_1:
                        yield node_1, node_2

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than DiGMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """
        # core_1[n] is the G2 node paired with n, provided n is mapped;
        # core_2[m] is the G1 node paired with m, provided m is mapped.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}.
        # in_1[n]/out_1[n] are set if n is in M_1 or in T_1^{in}/T_1^{out};
        # in_2[m]/out_2[m] likewise for G2.  The stored value is the depth
        # of the search tree at which the node entered the set.
        self.in_1 = {}
        self.in_2 = {}
        self.out_1 = {}
        self.out_2 = {}

        self.state = DiGMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        The pair is feasible when the mapping cannot be ruled out by the
        directed VF2 cutting rules (R_self, R_pred, R_succ, R_termin,
        R_termout, R_new).  Multigraphs are supported by comparing edge
        multiplicities via number_of_edges(), so DiGraph and MultiDiGraph
        instances can be mixed.
        """
        # Look ahead 0 -- R_self: selfloop multiplicities must match (for
        # monomorphism, G1 only needs at least as many selfloops as G2).
        if self.test == "mono":
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_pred: every already-mapped predecessor must correspond across
        # the mapping, with equal (or sufficient, for mono) multiplicities.
        if self.test != "mono":
            for predecessor in self.G1.pred[G1_node]:
                if predecessor in self.core_1:
                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        predecessor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
                        return False

        for predecessor in self.G2.pred[G2_node]:
            if predecessor in self.core_2:
                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) < self.G2.number_of_edges(predecessor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) != self.G2.number_of_edges(predecessor, G2_node):
                        return False

        # R_succ: analogously for successors.
        if self.test != "mono":
            for successor in self.G1[G1_node]:
                if successor in self.core_1:
                    if self.core_1[successor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        G1_node, successor
                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
                        return False

        for successor in self.G2[G2_node]:
            if successor in self.core_2:
                if self.core_2[successor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) < self.G2.number_of_edges(G2_node, successor):
                        return False
                else:
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) != self.G2.number_of_edges(G2_node, successor):
                        return False

        if self.test != "mono":
            # Look ahead 1 -- R_termin / R_termout: counts of unmapped
            # predecessors/successors lying in T^{in} then T^{out}.
            for term_1, term_2 in ((self.in_1, self.in_2), (self.out_1, self.out_2)):
                num1 = sum(
                    1
                    for p in self.G1.pred[G1_node]
                    if p in term_1 and p not in self.core_1
                )
                num2 = sum(
                    1
                    for p in self.G2.pred[G2_node]
                    if p in term_2 and p not in self.core_2
                )
                if not self._counts_feasible(num1, num2):
                    return False
                num1 = sum(
                    1 for s in self.G1[G1_node] if s in term_1 and s not in self.core_1
                )
                num2 = sum(
                    1 for s in self.G2[G2_node] if s in term_2 and s not in self.core_2
                )
                if not self._counts_feasible(num1, num2):
                    return False

            # Look ahead 2 -- R_new: counts of predecessors/successors in
            # neither the core mapping nor T^{in} nor T^{out}.
            num1 = sum(
                1
                for p in self.G1.pred[G1_node]
                if p not in self.in_1 and p not in self.out_1
            )
            num2 = sum(
                1
                for p in self.G2.pred[G2_node]
                if p not in self.in_2 and p not in self.out_2
            )
            if not self._counts_feasible(num1, num2):
                return False
            num1 = sum(
                1
                for s in self.G1[G1_node]
                if s not in self.in_1 and s not in self.out_1
            )
            num2 = sum(
                1
                for s in self.G2[G2_node]
                if s not in self.in_2 and s not in self.out_2
            )
            if not self._counts_feasible(num1, num2):
                return False

        # Otherwise, this node pair is syntactically feasible!
        return True
848
+
849
+
850
class GMState:
    """Internal representation of state for the GraphMatcher class.

    One instance exists per level of the VF2 depth-first search, so at
    most G2.order() of these objects are alive at a time.  Constructing
    an instance pushes a candidate pair onto the matcher's shared
    vectors; restore() pops it again.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Record (and apply) the addition of a candidate pair.

        GM is the owning GraphMatcher.  G1_node/G2_node form the pair
        being added to its current isomorphism mapping; passing None
        for either resets the search state instead.
        """
        self.GM = GM

        # Last stored node pair (filled in below if a pair was given).
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Reset the matcher's shared search vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Explicit None checks: node 0 is falsy but perfectly valid.
        if G1_node is not None and G2_node is not None:
            # Enter the pair into the partial mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node

            # Remember the pair so restore() can undo it.
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth of the search tree after this addition.
            self.depth = len(GM.core_1)

            # The new nodes themselves join the inout vectors first
            # (only if not already present)...
            if G1_node not in GM.inout_1:
                GM.inout_1[G1_node] = self.depth
            if G2_node not in GM.inout_2:
                GM.inout_2[G2_node] = self.depth

            # ...then every unmapped neighbor of a mapped node
            # (updates for T_1^{inout} and T_2^{inout}).
            frontier = {
                nbr
                for mapped in GM.core_1
                for nbr in GM.G1[mapped]
                if nbr not in GM.core_1
            }
            for nbr in frontier:
                if nbr not in GM.inout_1:
                    GM.inout_1[nbr] = self.depth

            frontier = {
                nbr
                for mapped in GM.core_2
                for nbr in GM.G2[mapped]
                if nbr not in GM.core_2
            }
            for nbr in frontier:
                if nbr not in GM.inout_2:
                    GM.inout_2[nbr] = self.depth

    def restore(self):
        """Undo the changes this state applied to the matcher's vectors."""
        # Drop the stored pair from the core mappings.
        # Explicit None checks: node 0 is falsy but valid.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Remove every inout entry recorded at this depth level.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            for node in [n for n in vector if vector[n] == self.depth]:
                del vector[node]
936
+
937
+
938
class DiGMState:
    """Internal representation of state for the DiGraphMatcher class.

    One instance exists per level of the VF2 depth-first search, so at
    most G2.order() of these objects are alive at a time.  Constructing
    an instance pushes a candidate pair onto the matcher's shared
    vectors; restore() pops it again.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Record (and apply) the addition of a candidate pair.

        GM is the owning DiGraphMatcher.  G1_node/G2_node form the pair
        being added to its current isomorphism mapping; passing None
        for either resets the search state instead.
        """
        self.GM = GM

        # Last stored node pair (filled in below if a pair was given).
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Reset the matcher's shared search vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Explicit None checks: node 0 is falsy but perfectly valid.
        if G1_node is not None and G2_node is not None:
            # Enter the pair into the partial mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node

            # Remember the pair so restore() can undo it.
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth of the search tree after this addition.
            self.depth = len(GM.core_1)

            # The new nodes themselves join the in/out vectors first
            # (only if not already present)...
            for vector in (GM.in_1, GM.out_1):
                if G1_node not in vector:
                    vector[G1_node] = self.depth
            for vector in (GM.in_2, GM.out_2):
                if G2_node not in vector:
                    vector[G2_node] = self.depth

            # ...then every unmapped predecessor/successor of a mapped node.

            # Updates for T_1^{in}: unmapped predecessors of mapped G1 nodes.
            for nbr in {
                p
                for m in GM.core_1
                for p in GM.G1.predecessors(m)
                if p not in GM.core_1
            }:
                if nbr not in GM.in_1:
                    GM.in_1[nbr] = self.depth

            # Updates for T_2^{in}.
            for nbr in {
                p
                for m in GM.core_2
                for p in GM.G2.predecessors(m)
                if p not in GM.core_2
            }:
                if nbr not in GM.in_2:
                    GM.in_2[nbr] = self.depth

            # Updates for T_1^{out}: unmapped successors of mapped G1 nodes.
            for nbr in {
                s
                for m in GM.core_1
                for s in GM.G1.successors(m)
                if s not in GM.core_1
            }:
                if nbr not in GM.out_1:
                    GM.out_1[nbr] = self.depth

            # Updates for T_2^{out}.
            for nbr in {
                s
                for m in GM.core_2
                for s in GM.G2.successors(m)
                if s not in GM.core_2
            }:
                if nbr not in GM.out_2:
                    GM.out_2[nbr] = self.depth

    def restore(self):
        """Undo the changes this state applied to the matcher's vectors."""
        # Drop the stored pair from the core mappings.
        # Explicit None checks: node 0 is falsy but valid.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Remove every in/out entry recorded at this depth level.
        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
            for node in [n for n in vector if vector[n] == self.depth]:
                del vector[node]
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py ADDED
@@ -0,0 +1,351 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions which help end users define customize node_match and
2
+ edge_match functions to use during isomorphism checks.
3
+ """
4
+ import math
5
+ import types
6
+ from itertools import permutations
7
+
8
+ __all__ = [
9
+ "categorical_node_match",
10
+ "categorical_edge_match",
11
+ "categorical_multiedge_match",
12
+ "numerical_node_match",
13
+ "numerical_edge_match",
14
+ "numerical_multiedge_match",
15
+ "generic_node_match",
16
+ "generic_edge_match",
17
+ "generic_multiedge_match",
18
+ ]
19
+
20
+
21
def copyfunc(f, name=None):
    """Returns an independent copy of function *f*, optionally renamed.

    Parameters
    ----------
    f : function
        The function to copy.
    name : str, optional
        New ``__name__`` for the copy; defaults to ``f.__name__``.
    """
    g = types.FunctionType(
        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
    )
    # types.FunctionType() does not transfer keyword-only defaults; the
    # original implementation silently dropped them.
    g.__kwdefaults__ = f.__kwdefaults__
    return g
26
+
27
+
28
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
    """Returns True if x and y are equal-length and sufficiently close, elementwise.

    Parameters
    ----------
    rtol : float
        The relative error tolerance.
    atol : float
        The absolute error tolerance.

    """
    # Materialize both inputs so their lengths can be compared: a plain
    # zip() silently truncates to the shorter sequence and could report
    # unequal-length inputs as "close".
    x = list(x)
    y = list(y)
    # assume finite weights, see numpy.allclose() for reference
    return len(x) == len(y) and all(
        math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y)
    )
41
+
42
+
43
# Shared docstring template: assigned verbatim to categorical_node_match
# below, and with "node" rewritten to "edge" for the edge variants.
categorical_doc = """
Returns a comparison function for a categorical node attribute.

The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object. If the sets from
G1 and G2 are the same, then the constructed function returns True.

Parameters
----------
attr : string | list
    The categorical node attribute to compare, or a list of categorical
    node attributes to compare.
default : value | list
    The default value for the categorical node attribute, or a list of
    default values for the categorical node attributes.

Returns
-------
match : function
    The customized, categorical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match("size", 1)
>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])

"""
71
+
72
+
73
def categorical_node_match(attr, default):
    # Build a `node_match` closure over either a single attribute or a
    # list of (attribute, default) pairs.
    if isinstance(attr, str):

        def match(data1, data2):
            return data1.get(attr, default) == data2.get(attr, default)

    else:
        pairs = list(zip(attr, default))

        def match(data1, data2):
            # Comparing the projected tuples is equivalent to checking
            # every attribute individually.
            left = tuple(data1.get(a, d) for a, d in pairs)
            right = tuple(data2.get(a, d) for a, d in pairs)
            return left == right

    return match
86
+
87
+
88
# Edge matching is identical to node matching; copy the function so its
# docstring can be rewritten independently below.
categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
89
+
90
+
91
def categorical_multiedge_match(attr, default):
    # Multigraph variant: each argument is a dict of parallel-edge data
    # dicts; compare the *sets* of attribute values across parallel edges.
    if isinstance(attr, str):

        def match(datasets1, datasets2):
            seen1 = {d.get(attr, default) for d in datasets1.values()}
            seen2 = {d.get(attr, default) for d in datasets2.values()}
            return seen1 == seen2

    else:
        pairs = list(zip(attr, default))

        def match(datasets1, datasets2):
            seen1 = {
                tuple(d.get(a, dflt) for a, dflt in pairs)
                for d in datasets1.values()
            }
            seen2 = {
                tuple(d.get(a, dflt) for a, dflt in pairs)
                for d in datasets2.values()
            }
            return seen1 == seen2

    return match
114
+
115
+
116
# Docstrings for categorical functions.
# The edge variant reuses the node docstring with "node" rewritten to
# "edge"; the multiedge variant additionally fixes up the function name.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
tmpdoc = categorical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
categorical_multiedge_match.__doc__ = tmpdoc
122
+
123
+
124
# Shared docstring template: assigned verbatim to numerical_node_match
# below, and with "node" rewritten to "edge" for the edge variants.
numerical_doc = """
Returns a comparison function for a numerical node attribute.

The value(s) of the attr(s) must be numerical and sortable. If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.

Parameters
----------
attr : string | list
    The numerical node attribute to compare, or a list of numerical
    node attributes to compare.
default : value | list
    The default value for the numerical node attribute, or a list of
    default values for the numerical node attributes.
rtol : float
    The relative error tolerance.
atol : float
    The absolute error tolerance.

Returns
-------
match : function
    The customized, numerical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match("weight", 1.0)
>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])

"""
156
+
157
+
158
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    # Build a `node_match` closure comparing numeric attributes within
    # the given relative/absolute tolerances.
    if isinstance(attr, str):

        def match(data1, data2):
            left = data1.get(attr, default)
            right = data2.get(attr, default)
            return math.isclose(left, right, rel_tol=rtol, abs_tol=atol)

    else:
        pairs = list(zip(attr, default))

        def match(data1, data2):
            left = [data1.get(a, d) for a, d in pairs]
            right = [data2.get(a, d) for a, d in pairs]
            return allclose(left, right, rtol=rtol, atol=atol)

    return match
178
+
179
+
180
# Edge variant: copyfunc clones numerical_node_match under a new name;
# its docstring is rewritten ("node" -> "edge") further below.
numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
181
+
182
+
183
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    """Build an ``edge_match`` predicate for multigraphs: the sorted
    per-edge numerical values from the two multiedge dicts must agree
    within the given tolerances.
    (The full docstring is installed from ``numerical_doc`` below.)
    """
    if isinstance(attr, str):

        def match(datasets1, datasets2):
            # Compare the multisets of attribute values via sorted lists.
            vals1 = sorted(d.get(attr, default) for d in datasets1.values())
            vals2 = sorted(d.get(attr, default) for d in datasets2.values())
            return allclose(vals1, vals2, rtol=rtol, atol=atol)

    else:
        # Pair each attribute name with its default once, up front.
        pairs = list(zip(attr, default))

        def match(datasets1, datasets2):
            vals1 = sorted(
                tuple(d.get(name, dflt) for name, dflt in pairs)
                for d in datasets1.values()
            )
            vals2 = sorted(
                tuple(d.get(name, dflt) for name, dflt in pairs)
                for d in datasets2.values()
            )
            # Every aligned tuple must match within tolerance.
            return all(
                allclose(x, y, rtol=rtol, atol=atol) for x, y in zip(vals1, vals2)
            )

    return match
212
+
213
+
214
# Docstrings for numerical functions.
numerical_node_match.__doc__ = numerical_doc
tmpdoc = numerical_doc.replace("node", "edge")
numerical_edge_match.__doc__ = tmpdoc
numerical_multiedge_match.__doc__ = tmpdoc.replace(
    "numerical_edge_match", "numerical_multiedge_match"
)
220
+
221
+
222
# Shared docstring template for the generic matcher factories below.
# It is installed onto generic_node_match / generic_edge_match (with
# "node" -> "edge" substitution) at the end of this section.
generic_doc = """
Returns a comparison function for a generic attribute.

The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.

Parameters
----------
attr : string | list
    The node attribute to compare, or a list of node attributes
    to compare.
default : value | list
    The default value for the node attribute, or a list of
    default values for the node attributes.
op : callable | list
    The operator to use when comparing attribute values, or a list
    of operators to use when comparing values for each attribute.

Returns
-------
match : function
    The customized, generic `node_match` function.

Examples
--------
>>> from operator import eq
>>> from math import isclose
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match("weight", 1.0, isclose)
>>> nm = generic_node_match("color", "red", eq)
>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

"""
256
+
257
+
258
def generic_node_match(attr, default, op):
    """Build a ``node_match`` predicate applying ``op`` to each pair of
    attribute values; all comparisons must succeed for a match.
    (The full docstring is installed from ``generic_doc`` below.)
    """
    if isinstance(attr, str):

        def match(data1, data2):
            return op(data1.get(attr, default), data2.get(attr, default))

    else:
        # Bundle (name, default, operator) triples once, up front.
        triples = list(zip(attr, default, op))

        def match(data1, data2):
            return all(
                compare(data1.get(name, dflt), data2.get(name, dflt))
                for name, dflt, compare in triples
            )

    return match
275
+
276
+
277
# Edge variant: copyfunc clones generic_node_match under a new name;
# its docstring is rewritten ("node" -> "edge") further below.
generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
278
+
279
+
280
def generic_multiedge_match(attr, default, op):
    """Returns a comparison function for a generic attribute.

    The value(s) of the attr(s) are compared using the specified
    operators. If all the attributes are equal, then the constructed
    function returns True. Potentially, the constructed edge_match
    function can be slow since it must verify that no isomorphism
    exists between the multiedges before it returns False.

    Parameters
    ----------
    attr : string | list
        The edge attribute to compare, or a list of edge attributes
        to compare.
    default : value | list
        The default value for the edge attribute, or a list of
        default values for the edge attributes.
    op : callable | list
        The operator to use when comparing attribute values, or a list
        of operators to use when comparing values for each attribute.

    Returns
    -------
    match : function
        The customized, generic `edge_match` function.

    Examples
    --------
    >>> from operator import eq
    >>> from math import isclose
    >>> from networkx.algorithms.isomorphism import generic_multiedge_match
    >>> nm = generic_multiedge_match("weight", 1.0, isclose)
    >>> nm = generic_multiedge_match("color", "red", eq)
    >>> nm = generic_multiedge_match(["weight", "color"], [1.0, "red"], [isclose, eq])

    """
    # This is slow, but generic.
    # We must test every possible isomorphism between the edges.
    # Normalize the single-attribute form so one code path handles both.
    if isinstance(attr, str):
        attr = [attr]
        default = [default]
        op = [op]
    attrs = list(zip(attr, default))

    def match(datasets1, datasets2):
        # Multisets of parallel edges can only be isomorphic when they
        # contain the same number of edges; without this guard, zip()
        # would silently truncate to the shorter multiset.
        if len(datasets1) != len(datasets2):
            return False
        # Collect each edge's attribute tuple from both multiedge dicts.
        values1 = [
            tuple(data1.get(a, d) for a, d in attrs) for data1 in datasets1.values()
        ]
        values2 = [
            tuple(data2.get(a, d) for a, d in attrs) for data2 in datasets2.values()
        ]
        # Generic operators need not induce an ordering, so every pairing
        # of the multiedges must be tried before declaring a mismatch.
        for vals2 in permutations(values2):
            if all(
                all(operator(x, y) for x, y, operator in zip(xi, yi, op))
                for xi, yi in zip(values1, vals2)
            ):
                # Found an isomorphism between the multiedges.
                return True
        # No isomorphism between the multiedges exists.
        return False

    return match
347
+
348
+
349
# Docstrings for generic functions.
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py ADDED
@@ -0,0 +1,304 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *****************************
3
+ Time-respecting VF2 Algorithm
4
+ *****************************
5
+
6
+ An extension of the VF2 algorithm for time-respecting graph isomorphism
7
+ testing in temporal graphs.
8
+
9
+ A temporal graph is one in which edges contain a datetime attribute,
10
+ denoting when interaction occurred between the incident nodes. A
11
+ time-respecting subgraph of a temporal graph is a subgraph such that
12
+ all interactions incident to a node occurred within a time threshold,
13
+ delta, of each other. A directed time-respecting subgraph has the
14
+ added constraint that incoming interactions to a node must precede
15
+ outgoing interactions from the same node - this enforces a sense of
16
+ directed flow.
17
+
18
+ Introduction
19
+ ------------
20
+
21
+ The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
22
+ extend the GraphMatcher and DiGraphMatcher classes, respectively,
23
+ to include temporal constraints on matches. This is achieved through
24
+ a semantic check, via the semantic_feasibility() function.
25
+
26
+ As well as including G1 (the graph in which to seek embeddings) and
27
+ G2 (the subgraph structure of interest), the name of the temporal
28
+ attribute on the edges and the time threshold, delta, must be supplied
29
+ as arguments to the matching constructors.
30
+
31
+ A delta of zero is the strictest temporal constraint on the match -
32
+ only embeddings in which all interactions occur at the same time will
33
+ be returned. A delta of one day will allow embeddings in which
34
+ adjacent interactions occur up to a day apart.
35
+
36
+ Examples
37
+ --------
38
+
39
+ Examples will be provided when the datetime type has been incorporated.
40
+
41
+
42
+ Temporal Subgraph Isomorphism
43
+ -----------------------------
44
+
45
+ A brief discussion of the somewhat diverse current literature will be
46
+ included here.
47
+
48
+ References
49
+ ----------
50
+
51
+ [1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
52
+ The 2013 IEEE/ACM International Conference on Advances in Social
53
+ Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
54
+ pages 1451 - 1452. [65]
55
+
56
+ For a discussion of the literature on temporal networks:
57
+
58
+ [3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
59
+ 519(3):97–125, 2012.
60
+
61
+ Notes
62
+ -----
63
+
64
+ Handles directed and undirected graphs and graphs with parallel edges.
65
+
66
+ """
67
+
68
+ import networkx as nx
69
+
70
+ from .isomorphvf2 import DiGraphMatcher, GraphMatcher
71
+
72
+ __all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
73
+
74
+
75
class TimeRespectingGraphMatcher(GraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingGraphMatcher.

        G1 and G2 should be nx.Graph or nx.MultiGraph instances.

        Examples
        --------
        To create a TimeRespectingGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, "date", timedelta(days=1))
        """
        # Name of the per-edge attribute holding the timestamp, and the
        # maximum allowed spread between incident edge timestamps.
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def one_hop(self, Gx, Gx_node, neighbors):
        """
        Edges one hop out from a node in the mapping should be
        time-respecting with respect to each other.
        """
        dates = []
        for n in neighbors:
            if Gx.is_multigraph():
                # MultiGraph: G[u][v] maps edge keys to data dicts, so
                # gather the date of every parallel edge between the pair.
                # NOTE: dispatching on isinstance(Gx, nx.Graph) is wrong
                # here because MultiGraph subclasses Graph, which made
                # this branch unreachable.
                for edge in Gx[Gx_node][n].values():
                    dates.append(edge[self.temporal_attribute_name])
            else:
                # Graph: G[u][v] is the single edge's data dictionary.
                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        if any(x is None for x in dates):
            raise ValueError("Datetime not supplied for at least one edge.")
        # Vacuously time-respecting when there are no mapped neighbors.
        return not dates or max(dates) - min(dates) <= self.delta

    def two_hop(self, Gx, core_x, Gx_node, neighbors):
        """
        Paths of length 2 from Gx_node should be time-respecting.
        """
        # Re-check each mapped neighbor's ego network with Gx_node included.
        return all(
            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
            for v in neighbors
        )

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
            return False
        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
            return False
        # Otherwise, this node is semantically feasible!
        return True
140
+
141
+
142
class TimeRespectingDiGraphMatcher(DiGraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingDiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Examples
        --------
        To create a TimeRespectingDiGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, "date", timedelta(days=1))
        """
        # Name of the per-edge attribute holding the timestamp, and the
        # maximum allowed spread between incident edge timestamps.
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
        """
        Get the dates of edges from predecessors.
        """
        pred_dates = []
        if Gx.is_multigraph():
            # MultiDiGraph: G[u][v] maps edge keys to data dicts; collect
            # the date of every parallel edge. NOTE: dispatching on
            # isinstance(Gx, nx.DiGraph) is wrong here because
            # MultiDiGraph subclasses DiGraph, which made this branch
            # unreachable.
            for n in pred:
                for edge in Gx[n][Gx_node].values():
                    pred_dates.append(edge[self.temporal_attribute_name])
        else:
            # DiGraph: G[u][v] is the single edge's data dictionary.
            for n in pred:
                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
        return pred_dates

    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
        """
        Get the dates of edges to successors.
        """
        succ_dates = []
        if Gx.is_multigraph():
            # MultiDiGraph: collect the date of every parallel edge
            # (see get_pred_dates for why is_multigraph() is used).
            for n in succ:
                for edge in Gx[Gx_node][n].values():
                    succ_dates.append(edge[self.temporal_attribute_name])
        else:
            # DiGraph: G[u][v] is the single edge's data dictionary.
            for n in succ:
                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        return succ_dates

    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
        """
        The ego node.
        """
        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
        return self.test_one(pred_dates, succ_dates) and self.test_two(
            pred_dates, succ_dates
        )

    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
        """
        The predecessors of the ego node.
        """
        return all(
            self.one_hop(
                Gx,
                p,
                core_x,
                self.preds(Gx, core_x, p),
                self.succs(Gx, core_x, p, Gx_node),
            )
            for p in pred
        )

    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
        """
        The successors of the ego node.
        """
        return all(
            self.one_hop(
                Gx,
                s,
                core_x,
                self.preds(Gx, core_x, s, Gx_node),
                self.succs(Gx, core_x, s),
            )
            for s in succ
        )

    def preds(self, Gx, core_x, v, Gx_node=None):
        # Mapped predecessors of v, optionally extended with Gx_node.
        pred = [n for n in Gx.predecessors(v) if n in core_x]
        # `if Gx_node:` would silently drop falsy node labels such as 0.
        if Gx_node is not None:
            pred.append(Gx_node)
        return pred

    def succs(self, Gx, core_x, v, Gx_node=None):
        # Mapped successors of v, optionally extended with Gx_node.
        succ = [n for n in Gx.successors(v) if n in core_x]
        # `if Gx_node:` would silently drop falsy node labels such as 0.
        if Gx_node is not None:
            succ.append(Gx_node)
        return succ

    def test_one(self, pred_dates, succ_dates):
        """
        Edges one hop out from Gx_node in the mapping should be
        time-respecting with respect to each other, regardless of
        direction.
        """
        time_respecting = True
        dates = pred_dates + succ_dates

        if any(x is None for x in dates):
            raise ValueError("Date or datetime not supplied for at least one edge.")

        dates.sort()  # Small to large.
        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
            time_respecting = False
        return time_respecting

    def test_two(self, pred_dates, succ_dates):
        """
        Edges from a dual Gx_node in the mapping should be ordered in
        a time-respecting manner.
        """
        time_respecting = True
        pred_dates.sort()
        succ_dates.sort()
        # First out before last in; negative of the necessary condition for time-respect.
        if (
            0 < len(succ_dates)
            and 0 < len(pred_dates)
            and succ_dates[0] < pred_dates[-1]
        ):
            time_respecting = False
        return time_respecting

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        pred, succ = (
            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
            [n for n in self.G1.successors(G1_node) if n in self.core_1],
        )
        if not self.one_hop(
            self.G1, G1_node, self.core_1, pred, succ
        ):  # Fail fast on first node.
            return False
        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
            return False
        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
            return False
        # Otherwise, this node is semantically feasible!
        return True
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (7.12 kB). View file