Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/fp32.pt +3 -0
- ckpts/universal/global_step80/zero/8.mlp.dense_h_to_4h.weight/fp32.pt +3 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/__init__.py +133 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/asteroidal.py +170 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/chains.py +172 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/clique.py +754 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/cluster.py +609 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/core.py +648 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/cuts.py +400 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/cycles.py +1231 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/d_separation.py +722 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/dag.py +1259 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/dominance.py +135 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/dominating.py +94 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py +168 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/euler.py +469 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py +322 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/graphical.py +483 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/hybrid.py +195 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/matching.py +1151 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/node_classification.py +218 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py +96 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/planarity.py +1402 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/polynomials.py +305 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py +97 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/regular.py +214 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/similarity.py +1777 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/simple_paths.py +937 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/smallworld.py +403 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/sparsifiers.py +295 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py +283 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/summarization.py +563 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/swap.py +407 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/threshold.py +979 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/tournament.py +406 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/voronoi.py +85 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/wiener.py +226 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/utils/__pycache__/configs.cpython-310.pyc +0 -0
ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7aff79bf04b9e6551faf632a2723d842c0488a6e09f733ad21222da95dae8888
+size 33555612
ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a3ea891200ee09d3ce48863ad37ba1e78090600b3f40bac4944a2686067276a
+size 33555627
ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:47278af3486b147350b48626adce93565f754c202105ed3d3372343b213d37a3
+size 33555533
ckpts/universal/global_step80/zero/8.mlp.dense_h_to_4h.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:10019d7a6b490b316e2c33e0dd8f5404f4b642b7fd8a83126df1e4db1ea3eee4
+size 33555533
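The four checkpoint entries above are Git LFS pointer files, not the tensors themselves: each records only the LFS spec version, the sha256 object id, and the byte size of the `.pt` blob stored in LFS. As a minimal illustrative sketch (the helper name `read_lfs_pointer` is ours, not part of this repository), such a pointer can be parsed like this:

import pathlib

def read_lfs_pointer(path):
    # Split each "key value" line of a Git LFS pointer into a dict;
    # assumes the three-line format shown in the diffs above.
    fields = {}
    for line in pathlib.Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    fields["size"] = int(fields["size"])
    return fields

# e.g. read_lfs_pointer("ckpts/universal/global_step80/zero/4.mlp.dense_4h_to_h.weight/exp_avg.pt")
# -> {"version": "https://git-lfs.github.com/spec/v1", "oid": "sha256:7aff...", "size": 33555612}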
venv/lib/python3.10/site-packages/networkx/algorithms/__init__.py
ADDED
@@ -0,0 +1,133 @@
+from networkx.algorithms.assortativity import *
+from networkx.algorithms.asteroidal import *
+from networkx.algorithms.boundary import *
+from networkx.algorithms.broadcasting import *
+from networkx.algorithms.bridges import *
+from networkx.algorithms.chains import *
+from networkx.algorithms.centrality import *
+from networkx.algorithms.chordal import *
+from networkx.algorithms.cluster import *
+from networkx.algorithms.clique import *
+from networkx.algorithms.communicability_alg import *
+from networkx.algorithms.components import *
+from networkx.algorithms.coloring import *
+from networkx.algorithms.core import *
+from networkx.algorithms.covering import *
+from networkx.algorithms.cycles import *
+from networkx.algorithms.cuts import *
+from networkx.algorithms.d_separation import *
+from networkx.algorithms.dag import *
+from networkx.algorithms.distance_measures import *
+from networkx.algorithms.distance_regular import *
+from networkx.algorithms.dominance import *
+from networkx.algorithms.dominating import *
+from networkx.algorithms.efficiency_measures import *
+from networkx.algorithms.euler import *
+from networkx.algorithms.graphical import *
+from networkx.algorithms.hierarchy import *
+from networkx.algorithms.hybrid import *
+from networkx.algorithms.link_analysis import *
+from networkx.algorithms.link_prediction import *
+from networkx.algorithms.lowest_common_ancestors import *
+from networkx.algorithms.isolate import *
+from networkx.algorithms.matching import *
+from networkx.algorithms.minors import *
+from networkx.algorithms.mis import *
+from networkx.algorithms.moral import *
+from networkx.algorithms.non_randomness import *
+from networkx.algorithms.operators import *
+from networkx.algorithms.planarity import *
+from networkx.algorithms.planar_drawing import *
+from networkx.algorithms.polynomials import *
+from networkx.algorithms.reciprocity import *
+from networkx.algorithms.regular import *
+from networkx.algorithms.richclub import *
+from networkx.algorithms.shortest_paths import *
+from networkx.algorithms.similarity import *
+from networkx.algorithms.graph_hashing import *
+from networkx.algorithms.simple_paths import *
+from networkx.algorithms.smallworld import *
+from networkx.algorithms.smetric import *
+from networkx.algorithms.structuralholes import *
+from networkx.algorithms.sparsifiers import *
+from networkx.algorithms.summarization import *
+from networkx.algorithms.swap import *
+from networkx.algorithms.time_dependent import *
+from networkx.algorithms.traversal import *
+from networkx.algorithms.triads import *
+from networkx.algorithms.vitality import *
+from networkx.algorithms.voronoi import *
+from networkx.algorithms.walks import *
+from networkx.algorithms.wiener import *
+
+# Make certain subpackages available to the user as direct imports from
+# the `networkx` namespace.
+from networkx.algorithms import approximation
+from networkx.algorithms import assortativity
+from networkx.algorithms import bipartite
+from networkx.algorithms import node_classification
+from networkx.algorithms import centrality
+from networkx.algorithms import chordal
+from networkx.algorithms import cluster
+from networkx.algorithms import clique
+from networkx.algorithms import components
+from networkx.algorithms import connectivity
+from networkx.algorithms import community
+from networkx.algorithms import coloring
+from networkx.algorithms import flow
+from networkx.algorithms import isomorphism
+from networkx.algorithms import link_analysis
+from networkx.algorithms import lowest_common_ancestors
+from networkx.algorithms import operators
+from networkx.algorithms import shortest_paths
+from networkx.algorithms import tournament
+from networkx.algorithms import traversal
+from networkx.algorithms import tree
+
+# Make certain functions from some of the previous subpackages available
+# to the user as direct imports from the `networkx` namespace.
+from networkx.algorithms.bipartite import complete_bipartite_graph
+from networkx.algorithms.bipartite import is_bipartite
+from networkx.algorithms.bipartite import projected_graph
+from networkx.algorithms.connectivity import all_pairs_node_connectivity
+from networkx.algorithms.connectivity import all_node_cuts
+from networkx.algorithms.connectivity import average_node_connectivity
+from networkx.algorithms.connectivity import edge_connectivity
+from networkx.algorithms.connectivity import edge_disjoint_paths
+from networkx.algorithms.connectivity import k_components
+from networkx.algorithms.connectivity import k_edge_components
+from networkx.algorithms.connectivity import k_edge_subgraphs
+from networkx.algorithms.connectivity import k_edge_augmentation
+from networkx.algorithms.connectivity import is_k_edge_connected
+from networkx.algorithms.connectivity import minimum_edge_cut
+from networkx.algorithms.connectivity import minimum_node_cut
+from networkx.algorithms.connectivity import node_connectivity
+from networkx.algorithms.connectivity import node_disjoint_paths
+from networkx.algorithms.connectivity import stoer_wagner
+from networkx.algorithms.flow import capacity_scaling
+from networkx.algorithms.flow import cost_of_flow
+from networkx.algorithms.flow import gomory_hu_tree
+from networkx.algorithms.flow import max_flow_min_cost
+from networkx.algorithms.flow import maximum_flow
+from networkx.algorithms.flow import maximum_flow_value
+from networkx.algorithms.flow import min_cost_flow
+from networkx.algorithms.flow import min_cost_flow_cost
+from networkx.algorithms.flow import minimum_cut
+from networkx.algorithms.flow import minimum_cut_value
+from networkx.algorithms.flow import network_simplex
+from networkx.algorithms.isomorphism import could_be_isomorphic
+from networkx.algorithms.isomorphism import fast_could_be_isomorphic
+from networkx.algorithms.isomorphism import faster_could_be_isomorphic
+from networkx.algorithms.isomorphism import is_isomorphic
+from networkx.algorithms.isomorphism.vf2pp import *
+from networkx.algorithms.tree.branchings import maximum_branching
+from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
+from networkx.algorithms.tree.branchings import minimum_branching
+from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
+from networkx.algorithms.tree.branchings import ArborescenceIterator
+from networkx.algorithms.tree.coding import *
+from networkx.algorithms.tree.decomposition import *
+from networkx.algorithms.tree.mst import *
+from networkx.algorithms.tree.operations import *
+from networkx.algorithms.tree.recognition import *
+from networkx.algorithms.tournament import is_tournament
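Because `networkx/algorithms/__init__.py` star-imports each algorithm module and exposes selected subpackages, everything defined in the files below is reachable from the top-level `networkx` namespace. A short sketch of what that buys the caller (assuming this vendored networkx is on the path):

import networkx as nx

G = nx.karate_club_graph()
# find_cliques and chain_decomposition come from clique.py and chains.py
# via the star imports above; tournament is exposed as a subpackage instead.
largest = max(nx.find_cliques(G), key=len)
chains = list(nx.chain_decomposition(G))
from networkx.algorithms import tournament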
venv/lib/python3.10/site-packages/networkx/algorithms/asteroidal.py
ADDED
@@ -0,0 +1,170 @@
+"""
+Algorithms for asteroidal triples and asteroidal numbers in graphs.
+
+An asteroidal triple in a graph G is a set of three non-adjacent vertices
+u, v and w such that there exist a path between any two of them that avoids
+closed neighborhood of the third. More formally, v_j, v_k belongs to the same
+connected component of G - N[v_i], where N[v_i] denotes the closed neighborhood
+of v_i. A graph which does not contain any asteroidal triples is called
+an AT-free graph. The class of AT-free graphs is a graph class for which
+many NP-complete problems are solvable in polynomial time. Amongst them,
+independent set and coloring.
+"""
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_at_free", "find_asteroidal_triple"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def find_asteroidal_triple(G):
+    r"""Find an asteroidal triple in the given graph.
+
+    An asteroidal triple is a triple of non-adjacent vertices such that
+    there exists a path between any two of them which avoids the closed
+    neighborhood of the third. It checks all independent triples of vertices
+    and whether they are an asteroidal triple or not. This is done with the
+    help of a data structure called a component structure.
+    A component structure encodes information about which vertices belongs to
+    the same connected component when the closed neighborhood of a given vertex
+    is removed from the graph. The algorithm used to check is the trivial
+    one, outlined in [1]_, which has a runtime of
+    :math:`O(|V||\overline{E} + |V||E|)`, where the second term is the
+    creation of the component structure.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check whether is AT-free or not
+
+    Returns
+    -------
+    list or None
+        An asteroidal triple is returned as a list of nodes. If no asteroidal
+        triple exists, i.e. the graph is AT-free, then None is returned.
+        The returned value depends on the certificate parameter. The default
+        option is a bool which is True if the graph is AT-free, i.e. the
+        given graph contains no asteroidal triples, and False otherwise, i.e.
+        if the graph contains at least one asteroidal triple.
+
+    Notes
+    -----
+    The component structure and the algorithm is described in [1]_. The current
+    implementation implements the trivial algorithm for simple graphs.
+
+    References
+    ----------
+    .. [1] Ekkehard Köhler,
+       "Recognizing Graphs without asteroidal triples",
+       Journal of Discrete Algorithms 2, pages 439-452, 2004.
+       https://www.sciencedirect.com/science/article/pii/S157086670400019X
+    """
+    V = set(G.nodes)
+
+    if len(V) < 6:
+        # An asteroidal triple cannot exist in a graph with 5 or less vertices.
+        return None
+
+    component_structure = create_component_structure(G)
+    E_complement = set(nx.complement(G).edges)
+
+    for e in E_complement:
+        u = e[0]
+        v = e[1]
+        u_neighborhood = set(G[u]).union([u])
+        v_neighborhood = set(G[v]).union([v])
+        union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
+        for w in V - union_of_neighborhoods:
+            # Check for each pair of vertices whether they belong to the
+            # same connected component when the closed neighborhood of the
+            # third is removed.
+            if (
+                component_structure[u][v] == component_structure[u][w]
+                and component_structure[v][u] == component_structure[v][w]
+                and component_structure[w][u] == component_structure[w][v]
+            ):
+                return [u, v, w]
+    return None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_at_free(G):
+    """Check if a graph is AT-free.
+
+    The method uses the `find_asteroidal_triple` method to recognize
+    an AT-free graph. If no asteroidal triple is found the graph is
+    AT-free and True is returned. If at least one asteroidal triple is
+    found the graph is not AT-free and False is returned.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check whether is AT-free or not.
+
+    Returns
+    -------
+    bool
+        True if G is AT-free and False otherwise.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> nx.is_at_free(G)
+    True
+
+    >>> G = nx.cycle_graph(6)
+    >>> nx.is_at_free(G)
+    False
+    """
+    return find_asteroidal_triple(G) is None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def create_component_structure(G):
+    r"""Create component structure for G.
+
+    A *component structure* is an `nxn` array, denoted `c`, where `n` is
+    the number of vertices, where each row and column corresponds to a vertex.
+
+    .. math::
+        c_{uv} = \begin{cases} 0, if v \in N[u] \\
+            k, if v \in component k of G \setminus N[u] \end{cases}
+
+    Where `k` is an arbitrary label for each component. The structure is used
+    to simplify the detection of asteroidal triples.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        Undirected, simple graph.
+
+    Returns
+    -------
+    component_structure : dictionary
+        A dictionary of dictionaries, keyed by pairs of vertices.
+
+    """
+    V = set(G.nodes)
+    component_structure = {}
+    for v in V:
+        label = 0
+        closed_neighborhood = set(G[v]).union({v})
+        row_dict = {}
+        for u in closed_neighborhood:
+            row_dict[u] = 0
+
+        G_reduced = G.subgraph(set(G.nodes) - closed_neighborhood)
+        for cc in nx.connected_components(G_reduced):
+            label += 1
+            for u in cc:
+                row_dict[u] = label
+
+        component_structure[v] = row_dict
+
+    return component_structure
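A brief usage sketch for the asteroidal-triple helpers above (the six-cycle comes from the `is_at_free` docstring; which triple is returned is implementation-defined):

import networkx as nx

G = nx.cycle_graph(6)                # contains an asteroidal triple
print(nx.is_at_free(G))              # False
print(nx.find_asteroidal_triple(G))  # e.g. a list such as [0, 2, 4]

H = nx.complete_graph(4)             # fewer than 6 nodes, so trivially AT-free
print(nx.find_asteroidal_triple(H))  # None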
venv/lib/python3.10/site-packages/networkx/algorithms/chains.py
ADDED
@@ -0,0 +1,172 @@
+"""Functions for finding chains in a graph."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["chain_decomposition"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def chain_decomposition(G, root=None):
+    """Returns the chain decomposition of a graph.
+
+    The *chain decomposition* of a graph with respect a depth-first
+    search tree is a set of cycles or paths derived from the set of
+    fundamental cycles of the tree in the following manner. Consider
+    each fundamental cycle with respect to the given tree, represented
+    as a list of edges beginning with the nontree edge oriented away
+    from the root of the tree. For each fundamental cycle, if it
+    overlaps with any previous fundamental cycle, just take the initial
+    non-overlapping segment, which is a path instead of a cycle. Each
+    cycle or path is called a *chain*. For more information, see [1]_.
+
+    Parameters
+    ----------
+    G : undirected graph
+
+    root : node (optional)
+       A node in the graph `G`. If specified, only the chain
+       decomposition for the connected component containing this node
+       will be returned. This node indicates the root of the depth-first
+       search tree.
+
+    Yields
+    ------
+    chain : list
+       A list of edges representing a chain. There is no guarantee on
+       the orientation of the edges in each chain (for example, if a
+       chain includes the edge joining nodes 1 and 2, the chain may
+       include either (1, 2) or (2, 1)).
+
+    Raises
+    ------
+    NodeNotFound
+       If `root` is not in the graph `G`.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
+    >>> list(nx.chain_decomposition(G))
+    [[(4, 5), (5, 3), (3, 4)]]
+
+    Notes
+    -----
+    The worst-case running time of this implementation is linear in the
+    number of nodes and number of edges [1]_.
+
+    References
+    ----------
+    .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
+       and 2-edge-connectivity." *Information Processing Letters*,
+       113, 241–244. Elsevier. <https://doi.org/10.1016/j.ipl.2013.01.016>
+
+    """
+
+    def _dfs_cycle_forest(G, root=None):
+        """Builds a directed graph composed of cycles from the given graph.
+
+        `G` is an undirected simple graph. `root` is a node in the graph
+        from which the depth-first search is started.
+
+        This function returns both the depth-first search cycle graph
+        (as a :class:`~networkx.DiGraph`) and the list of nodes in
+        depth-first preorder. The depth-first search cycle graph is a
+        directed graph whose edges are the edges of `G` oriented toward
+        the root if the edge is a tree edge and away from the root if
+        the edge is a non-tree edge. If `root` is not specified, this
+        performs a depth-first search on each connected component of `G`
+        and returns a directed forest instead.
+
+        If `root` is not in the graph, this raises :exc:`KeyError`.
+
+        """
+        # Create a directed graph from the depth-first search tree with
+        # root node `root` in which tree edges are directed toward the
+        # root and nontree edges are directed away from the root. For
+        # each node with an incident nontree edge, this creates a
+        # directed cycle starting with the nontree edge and returning to
+        # that node.
+        #
+        # The `parent` node attribute stores the parent of each node in
+        # the DFS tree. The `nontree` edge attribute indicates whether
+        # the edge is a tree edge or a nontree edge.
+        #
+        # We also store the order of the nodes found in the depth-first
+        # search in the `nodes` list.
+        H = nx.DiGraph()
+        nodes = []
+        for u, v, d in nx.dfs_labeled_edges(G, source=root):
+            if d == "forward":
+                # `dfs_labeled_edges()` yields (root, root, 'forward')
+                # if it is beginning the search on a new connected
+                # component.
+                if u == v:
+                    H.add_node(v, parent=None)
+                    nodes.append(v)
+                else:
+                    H.add_node(v, parent=u)
+                    H.add_edge(v, u, nontree=False)
+                    nodes.append(v)
+            # `dfs_labeled_edges` considers nontree edges in both
+            # orientations, so we need to not add the edge if it its
+            # other orientation has been added.
+            elif d == "nontree" and v not in H[u]:
+                H.add_edge(v, u, nontree=True)
+            else:
+                # Do nothing on 'reverse' edges; we only care about
+                # forward and nontree edges.
+                pass
+        return H, nodes
+
+    def _build_chain(G, u, v, visited):
+        """Generate the chain starting from the given nontree edge.
+
+        `G` is a DFS cycle graph as constructed by
+        :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge
+        that begins a chain. `visited` is a set representing the nodes
+        in `G` that have already been visited.
+
+        This function yields the edges in an initial segment of the
+        fundamental cycle of `G` starting with the nontree edge (`u`,
+        `v`) that includes all the edges up until the first node that
+        appears in `visited`. The tree edges are given by the 'parent'
+        node attribute. The `visited` set is updated to add each node in
+        an edge yielded by this function.
+
+        """
+        while v not in visited:
+            yield u, v
+            visited.add(v)
+            u, v = v, G.nodes[v]["parent"]
+        yield u, v
+
+    # Check if the root is in the graph G. If not, raise NodeNotFound
+    if root is not None and root not in G:
+        raise nx.NodeNotFound(f"Root node {root} is not in graph")
+
+    # Create a directed version of H that has the DFS edges directed
+    # toward the root and the nontree edges directed away from the root
+    # (in each connected component).
+    H, nodes = _dfs_cycle_forest(G, root)
+
+    # Visit the nodes again in DFS order. For each node, and for each
+    # nontree edge leaving that node, compute the fundamental cycle for
+    # that nontree edge starting with that edge. If the fundamental
+    # cycle overlaps with any visited nodes, just take the prefix of the
+    # cycle up to the point of visited nodes.
+    #
+    # We repeat this process for each connected component (implicitly,
+    # since `nodes` already has a list of the nodes grouped by connected
+    # component).
+    visited = set()
+    for u in nodes:
+        visited.add(u)
+        # For each nontree edge going out of node u...
+        edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
+        for u, v in edges:
+            # Create the cycle or cycle prefix starting with the
+            # nontree edge.
+            chain = list(_build_chain(H, u, v, visited))
+            yield chain
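A usage sketch for `chain_decomposition`, reusing the docstring's example graph; per the Schmidt reference cited above, the edges that appear in no chain are exactly the bridges, which is how the decomposition supports 2-edge-connectivity testing:

import networkx as nx

G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
chains = list(nx.chain_decomposition(G))  # [[(4, 5), (5, 3), (3, 4)]]
covered = {frozenset(e) for chain in chains for e in chain}
bridges = [e for e in G.edges if frozenset(e) not in covered]  # [(0, 1), (1, 4)]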
venv/lib/python3.10/site-packages/networkx/algorithms/clique.py
ADDED
@@ -0,0 +1,754 @@
+"""Functions for finding and manipulating cliques.
+
+Finding the largest clique in a graph is NP-complete problem, so most of
+these algorithms have an exponential running time; for more information,
+see the Wikipedia article on the clique problem [1]_.
+
+.. [1] clique problem:: https://en.wikipedia.org/wiki/Clique_problem
+
+"""
+from collections import defaultdict, deque
+from itertools import chain, combinations, islice
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "find_cliques",
+    "find_cliques_recursive",
+    "make_max_clique_graph",
+    "make_clique_bipartite",
+    "node_clique_number",
+    "number_of_cliques",
+    "enumerate_all_cliques",
+    "max_weight_clique",
+]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def enumerate_all_cliques(G):
+    """Returns all cliques in an undirected graph.
+
+    This function returns an iterator over cliques, each of which is a
+    list of nodes. The iteration is ordered by cardinality of the
+    cliques: first all cliques of size one, then all cliques of size
+    two, etc.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    Returns
+    -------
+    iterator
+        An iterator over cliques, each of which is a list of nodes in
+        `G`. The cliques are ordered according to size.
+
+    Notes
+    -----
+    To obtain a list of all cliques, use
+    `list(enumerate_all_cliques(G))`. However, be aware that in the
+    worst-case, the length of this list can be exponential in the number
+    of nodes in the graph (for example, when the graph is the complete
+    graph). This function avoids storing all cliques in memory by only
+    keeping current candidate node lists in memory during its search.
+
+    The implementation is adapted from the algorithm by Zhang, et
+    al. (2005) [1]_ to output all cliques discovered.
+
+    This algorithm ignores self-loops and parallel edges, since cliques
+    are not conventionally defined with such edges.
+
+    References
+    ----------
+    .. [1] Yun Zhang, Abu-Khzam, F.N., Baldwin, N.E., Chesler, E.J.,
+           Langston, M.A., Samatova, N.F.,
+           "Genome-Scale Computational Approaches to Memory-Intensive
+           Applications in Systems Biology".
+           *Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005
+           Conference, pp. 12, 12--18 Nov. 2005.
+           <https://doi.org/10.1109/SC.2005.29>.
+
+    """
+    index = {}
+    nbrs = {}
+    for u in G:
+        index[u] = len(index)
+        # Neighbors of u that appear after u in the iteration order of G.
+        nbrs[u] = {v for v in G[u] if v not in index}
+
+    queue = deque(([u], sorted(nbrs[u], key=index.__getitem__)) for u in G)
+    # Loop invariants:
+    # 1. len(base) is nondecreasing.
+    # 2. (base + cnbrs) is sorted with respect to the iteration order of G.
+    # 3. cnbrs is a set of common neighbors of nodes in base.
+    while queue:
+        base, cnbrs = map(list, queue.popleft())
+        yield base
+        for i, u in enumerate(cnbrs):
+            # Use generators to reduce memory consumption.
+            queue.append(
+                (
+                    chain(base, [u]),
+                    filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)),
+                )
+            )
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def find_cliques(G, nodes=None):
+    """Returns all maximal cliques in an undirected graph.
+
+    For each node *n*, a *maximal clique for n* is a largest complete
+    subgraph containing *n*. The largest maximal clique is sometimes
+    called the *maximum clique*.
+
+    This function returns an iterator over cliques, each of which is a
+    list of nodes. It is an iterative implementation, so should not
+    suffer from recursion depth issues.
+
+    This function accepts a list of `nodes` and only the maximal cliques
+    containing all of these `nodes` are returned. It can considerably speed up
+    the running time if some specific cliques are desired.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    nodes : list, optional (default=None)
+        If provided, only yield *maximal cliques* containing all nodes in `nodes`.
+        If `nodes` isn't a clique itself, a ValueError is raised.
+
+    Returns
+    -------
+    iterator
+        An iterator over maximal cliques, each of which is a list of
+        nodes in `G`. If `nodes` is provided, only the maximal cliques
+        containing all the nodes in `nodes` are returned. The order of
+        cliques is arbitrary.
+
+    Raises
+    ------
+    ValueError
+        If `nodes` is not a clique.
+
+    Examples
+    --------
+    >>> from pprint import pprint  # For nice dict formatting
+    >>> G = nx.karate_club_graph()
+    >>> sum(1 for c in nx.find_cliques(G))  # The number of maximal cliques in G
+    36
+    >>> max(nx.find_cliques(G), key=len)  # The largest maximal clique in G
+    [0, 1, 2, 3, 13]
+
+    The size of the largest maximal clique is known as the *clique number* of
+    the graph, which can be found directly with:
+
+    >>> max(len(c) for c in nx.find_cliques(G))
+    5
+
+    One can also compute the number of maximal cliques in `G` that contain a given
+    node. The following produces a dictionary keyed by node whose
+    values are the number of maximal cliques in `G` that contain the node:
+
+    >>> pprint({n: sum(1 for c in nx.find_cliques(G) if n in c) for n in G})
+    {0: 13,
+     1: 6,
+     2: 7,
+     3: 3,
+     4: 2,
+     5: 3,
+     6: 3,
+     7: 1,
+     8: 3,
+     9: 2,
+     10: 2,
+     11: 1,
+     12: 1,
+     13: 2,
+     14: 1,
+     15: 1,
+     16: 1,
+     17: 1,
+     18: 1,
+     19: 2,
+     20: 1,
+     21: 1,
+     22: 1,
+     23: 3,
+     24: 2,
+     25: 2,
+     26: 1,
+     27: 3,
+     28: 2,
+     29: 2,
+     30: 2,
+     31: 4,
+     32: 9,
+     33: 14}
+
+    Or, similarly, the maximal cliques in `G` that contain a given node.
+    For example, the 4 maximal cliques that contain node 31:
+
+    >>> [c for c in nx.find_cliques(G) if 31 in c]
+    [[0, 31], [33, 32, 31], [33, 28, 31], [24, 25, 31]]
+
+    See Also
+    --------
+    find_cliques_recursive
+        A recursive version of the same algorithm.
+
+    Notes
+    -----
+    To obtain a list of all maximal cliques, use
+    `list(find_cliques(G))`. However, be aware that in the worst-case,
+    the length of this list can be exponential in the number of nodes in
+    the graph. This function avoids storing all cliques in memory by
+    only keeping current candidate node lists in memory during its search.
+
+    This implementation is based on the algorithm published by Bron and
+    Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
+    (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. It
+    essentially unrolls the recursion used in the references to avoid
+    issues of recursion stack depth (for a recursive implementation, see
+    :func:`find_cliques_recursive`).
+
+    This algorithm ignores self-loops and parallel edges, since cliques
+    are not conventionally defined with such edges.
+
+    References
+    ----------
+    .. [1] Bron, C. and Kerbosch, J.
+       "Algorithm 457: finding all cliques of an undirected graph".
+       *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
+       <http://portal.acm.org/citation.cfm?doid=362342.362367>
+
+    .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
+       "The worst-case time complexity for generating all maximal
+       cliques and computational experiments",
+       *Theoretical Computer Science*, Volume 363, Issue 1,
+       Computing and Combinatorics,
+       10th Annual International Conference on
+       Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
+       <https://doi.org/10.1016/j.tcs.2006.06.015>
+
+    .. [3] F. Cazals, C. Karande,
+       "A note on the problem of reporting maximal cliques",
+       *Theoretical Computer Science*,
+       Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
+       <https://doi.org/10.1016/j.tcs.2008.05.010>
+
+    """
+    if len(G) == 0:
+        return
+
+    adj = {u: {v for v in G[u] if v != u} for u in G}
+
+    # Initialize Q with the given nodes and subg, cand with their nbrs
+    Q = nodes[:] if nodes is not None else []
+    cand = set(G)
+    for node in Q:
+        if node not in cand:
+            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
+        cand &= adj[node]
+
+    if not cand:
+        yield Q[:]
+        return
+
+    subg = cand.copy()
+    stack = []
+    Q.append(None)
+
+    u = max(subg, key=lambda u: len(cand & adj[u]))
+    ext_u = cand - adj[u]
+
+    try:
+        while True:
+            if ext_u:
+                q = ext_u.pop()
+                cand.remove(q)
+                Q[-1] = q
+                adj_q = adj[q]
+                subg_q = subg & adj_q
+                if not subg_q:
+                    yield Q[:]
+                else:
+                    cand_q = cand & adj_q
+                    if cand_q:
+                        stack.append((subg, cand, ext_u))
+                        Q.append(None)
+                        subg = subg_q
+                        cand = cand_q
+                        u = max(subg, key=lambda u: len(cand & adj[u]))
+                        ext_u = cand - adj[u]
+            else:
+                Q.pop()
+                subg, cand, ext_u = stack.pop()
+    except IndexError:
+        pass
+
+
+# TODO Should this also be not implemented for directed graphs?
+@nx._dispatchable
+def find_cliques_recursive(G, nodes=None):
+    """Returns all maximal cliques in a graph.
+
+    For each node *v*, a *maximal clique for v* is a largest complete
+    subgraph containing *v*. The largest maximal clique is sometimes
+    called the *maximum clique*.
+
+    This function returns an iterator over cliques, each of which is a
+    list of nodes. It is a recursive implementation, so may suffer from
+    recursion depth issues, but is included for pedagogical reasons.
+    For a non-recursive implementation, see :func:`find_cliques`.
+
+    This function accepts a list of `nodes` and only the maximal cliques
+    containing all of these `nodes` are returned. It can considerably speed up
+    the running time if some specific cliques are desired.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nodes : list, optional (default=None)
+        If provided, only yield *maximal cliques* containing all nodes in `nodes`.
+        If `nodes` isn't a clique itself, a ValueError is raised.
+
+    Returns
+    -------
+    iterator
+        An iterator over maximal cliques, each of which is a list of
+        nodes in `G`. If `nodes` is provided, only the maximal cliques
+        containing all the nodes in `nodes` are yielded. The order of
+        cliques is arbitrary.
+
+    Raises
+    ------
+    ValueError
+        If `nodes` is not a clique.
+
+    See Also
+    --------
+    find_cliques
+        An iterative version of the same algorithm. See docstring for examples.
+
+    Notes
+    -----
+    To obtain a list of all maximal cliques, use
+    `list(find_cliques_recursive(G))`. However, be aware that in the
+    worst-case, the length of this list can be exponential in the number
+    of nodes in the graph. This function avoids storing all cliques in memory
+    by only keeping current candidate node lists in memory during its search.
+
+    This implementation is based on the algorithm published by Bron and
+    Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
+    (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. For a
+    non-recursive implementation, see :func:`find_cliques`.
+
+    This algorithm ignores self-loops and parallel edges, since cliques
+    are not conventionally defined with such edges.
+
+    References
+    ----------
+    .. [1] Bron, C. and Kerbosch, J.
+       "Algorithm 457: finding all cliques of an undirected graph".
+       *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
+       <http://portal.acm.org/citation.cfm?doid=362342.362367>
+
+    .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
+       "The worst-case time complexity for generating all maximal
+       cliques and computational experiments",
+       *Theoretical Computer Science*, Volume 363, Issue 1,
+       Computing and Combinatorics,
+       10th Annual International Conference on
+       Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
+       <https://doi.org/10.1016/j.tcs.2006.06.015>
+
+    .. [3] F. Cazals, C. Karande,
+       "A note on the problem of reporting maximal cliques",
+       *Theoretical Computer Science*,
+       Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
+       <https://doi.org/10.1016/j.tcs.2008.05.010>
+
+    """
+    if len(G) == 0:
+        return iter([])
+
+    adj = {u: {v for v in G[u] if v != u} for u in G}
+
+    # Initialize Q with the given nodes and subg, cand with their nbrs
+    Q = nodes[:] if nodes is not None else []
+    cand_init = set(G)
+    for node in Q:
+        if node not in cand_init:
+            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
+        cand_init &= adj[node]
+
+    if not cand_init:
+        return iter([Q])
+
+    subg_init = cand_init.copy()
+
+    def expand(subg, cand):
+        u = max(subg, key=lambda u: len(cand & adj[u]))
+        for q in cand - adj[u]:
+            cand.remove(q)
+            Q.append(q)
+            adj_q = adj[q]
+            subg_q = subg & adj_q
+            if not subg_q:
+                yield Q[:]
+            else:
+                cand_q = cand & adj_q
+                if cand_q:
+                    yield from expand(subg_q, cand_q)
+            Q.pop()
+
+    return expand(subg_init, cand_init)
+
+
+@nx._dispatchable(returns_graph=True)
+def make_max_clique_graph(G, create_using=None):
+    """Returns the maximal clique graph of the given graph.
+
+    The nodes of the maximal clique graph of `G` are the cliques of
+    `G` and an edge joins two cliques if the cliques are not disjoint.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    NetworkX graph
+        A graph whose nodes are the cliques of `G` and whose edges
+        join two cliques if they are not disjoint.
+
+    Notes
+    -----
+    This function behaves like the following code::
+
+        import networkx as nx
+
+        G = nx.make_clique_bipartite(G)
+        cliques = [v for v in G.nodes() if G.nodes[v]["bipartite"] == 0]
+        G = nx.bipartite.projected_graph(G, cliques)
+        G = nx.relabel_nodes(G, {-v: v - 1 for v in G})
+
+    It should be faster, though, since it skips all the intermediate
+    steps.
+
+    """
+    if create_using is None:
+        B = G.__class__()
+    else:
+        B = nx.empty_graph(0, create_using)
+    cliques = list(enumerate(set(c) for c in find_cliques(G)))
+    # Add a numbered node for each clique.
+    B.add_nodes_from(i for i, c in cliques)
+    # Join cliques by an edge if they share a node.
+    clique_pairs = combinations(cliques, 2)
+    B.add_edges_from((i, j) for (i, c1), (j, c2) in clique_pairs if c1 & c2)
+    return B
+
+
+@nx._dispatchable(returns_graph=True)
+def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
+    """Returns the bipartite clique graph corresponding to `G`.
+
+    In the returned bipartite graph, the "bottom" nodes are the nodes of
+    `G` and the "top" nodes represent the maximal cliques of `G`.
+    There is an edge from node *v* to clique *C* in the returned graph
+    if and only if *v* is an element of *C*.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    fpos : bool
+        If True or not None, the returned graph will have an
+        additional attribute, `pos`, a dictionary mapping node to
+        position in the Euclidean plane.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    NetworkX graph
+        A bipartite graph whose "bottom" set is the nodes of the graph
+        `G`, whose "top" set is the cliques of `G`, and whose edges
+        join nodes of `G` to the cliques that contain them.
+
+        The nodes of the graph `G` have the node attribute
+        'bipartite' set to 1 and the nodes representing cliques
+        have the node attribute 'bipartite' set to 0, as is the
+        convention for bipartite graphs in NetworkX.
+
+    """
+    B = nx.empty_graph(0, create_using)
+    B.clear()
+    # The "bottom" nodes in the bipartite graph are the nodes of the
+    # original graph, G.
+    B.add_nodes_from(G, bipartite=1)
+    for i, cl in enumerate(find_cliques(G)):
+        # The "top" nodes in the bipartite graph are the cliques. These
+        # nodes get negative numbers as labels.
+        name = -i - 1
+        B.add_node(name, bipartite=0)
+        B.add_edges_from((v, name) for v in cl)
+    return B
+
+
+@nx._dispatchable
+def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
+    """Returns the size of the largest maximal clique containing each given node.
+
+    Returns a single or list depending on input nodes.
+    An optional list of cliques can be input if already computed.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    cliques : list, optional (default=None)
+        A list of cliques, each of which is itself a list of nodes.
+        If not specified, the list of all cliques will be computed
+        using :func:`find_cliques`.
+
+    Returns
+    -------
+    int or dict
+        If `nodes` is a single node, returns the size of the
+        largest maximal clique in `G` containing that node.
+        Otherwise return a dict keyed by node to the size
+        of the largest maximal clique containing that node.
+
+    See Also
+    --------
+    find_cliques
+        find_cliques yields the maximal cliques of G.
+        It accepts a `nodes` argument which restricts consideration to
+        maximal cliques containing all the given `nodes`.
+        The search for the cliques is optimized for `nodes`.
+    """
+    if cliques is None:
+        if nodes is not None:
+            # Use ego_graph to decrease size of graph
+            # check for single node
+            if nodes in G:
+                return max(len(c) for c in find_cliques(nx.ego_graph(G, nodes)))
+            # handle multiple nodes
+            return {
+                n: max(len(c) for c in find_cliques(nx.ego_graph(G, n))) for n in nodes
+            }
+
+        # nodes is None--find all cliques
+        cliques = list(find_cliques(G))
+
+    # single node requested
+    if nodes in G:
+        return max(len(c) for c in cliques if nodes in c)
+
+    # multiple nodes requested
+    # preprocess all nodes (faster than one at a time for even 2 nodes)
+    size_for_n = defaultdict(int)
+    for c in cliques:
+        size_of_c = len(c)
+        for n in c:
+            if size_for_n[n] < size_of_c:
+                size_for_n[n] = size_of_c
+    if nodes is None:
+        return size_for_n
+    return {n: size_for_n[n] for n in nodes}
+
+
+def number_of_cliques(G, nodes=None, cliques=None):
+    """Returns the number of maximal cliques for each node.
+
+    Returns a single or list depending on input nodes.
+    Optional list of cliques can be input if already computed.
+    """
+    if cliques is None:
+        cliques = list(find_cliques(G))
+
+    if nodes is None:
+        nodes = list(G.nodes())  # none, get entire graph
+
+    if not isinstance(nodes, list):  # check for a list
+        v = nodes
+        # assume it is a single value
+        numcliq = len([1 for c in cliques if v in c])
+    else:
+        numcliq = {}
+        for v in nodes:
+            numcliq[v] = len([1 for c in cliques if v in c])
+    return numcliq
+
+
+class MaxWeightClique:
+    """A class for the maximum weight clique algorithm.
+
+    This class is a helper for the `max_weight_clique` function. The class
+    should not normally be used directly.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The undirected graph for which a maximum weight clique is sought
+    weight : string or None, optional (default='weight')
+        The node attribute that holds the integer value used as a weight.
+        If None, then each node has weight 1.
+
+    Attributes
+    ----------
+    G : NetworkX graph
+        The undirected graph for which a maximum weight clique is sought
+    node_weights: dict
+        The weight of each node
+    incumbent_nodes : list
+        The nodes of the incumbent clique (the best clique found so far)
+    incumbent_weight: int
+        The weight of the incumbent clique
+    """
+
+    def __init__(self, G, weight):
+        self.G = G
+        self.incumbent_nodes = []
+        self.incumbent_weight = 0
+
+        if weight is None:
+            self.node_weights = {v: 1 for v in G.nodes()}
+        else:
+            for v in G.nodes():
+                if weight not in G.nodes[v]:
+                    errmsg = f"Node {v!r} does not have the requested weight field."
+                    raise KeyError(errmsg)
+                if not isinstance(G.nodes[v][weight], int):
+                    errmsg = f"The {weight!r} field of node {v!r} is not an integer."
+                    raise ValueError(errmsg)
+            self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}
+
+    def update_incumbent_if_improved(self, C, C_weight):
+        """Update the incumbent if the node set C has greater weight.
+
+        C is assumed to be a clique.
+        """
+        if C_weight > self.incumbent_weight:
+            self.incumbent_nodes = C[:]
+            self.incumbent_weight = C_weight
+
+    def greedily_find_independent_set(self, P):
+        """Greedily find an independent set of nodes from a set of
+        nodes P."""
+        independent_set = []
+        P = P[:]
+        while P:
+            v = P[0]
+            independent_set.append(v)
+            P = [w for w in P if v != w and not self.G.has_edge(v, w)]
+        return independent_set
+
+    def find_branching_nodes(self, P, target):
+        """Find a set of nodes to branch on."""
+        residual_wt = {v: self.node_weights[v] for v in P}
+        total_wt = 0
+        P = P[:]
+        while P:
+            independent_set = self.greedily_find_independent_set(P)
+            min_wt_in_class = min(residual_wt[v] for v in independent_set)
+            total_wt += min_wt_in_class
+            if total_wt > target:
+                break
+            for v in independent_set:
+                residual_wt[v] -= min_wt_in_class
+            P = [v for v in P if residual_wt[v] != 0]
+        return P
+
+    def expand(self, C, C_weight, P):
+        """Look for the best clique that contains all the nodes in C and zero or
+        more of the nodes in P, backtracking if it can be shown that no such
+        clique has greater weight than the incumbent.
+        """
+        self.update_incumbent_if_improved(C, C_weight)
+        branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight)
+        while branching_nodes:
+            v = branching_nodes.pop()
+            P.remove(v)
+            new_C = C + [v]
+            new_C_weight = C_weight + self.node_weights[v]
+            new_P = [w for w in P if self.G.has_edge(v, w)]
+            self.expand(new_C, new_C_weight, new_P)
+
+    def find_max_weight_clique(self):
+        """Find a maximum weight clique."""
+        # Sort nodes in reverse order of degree for speed
+        nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True)
+        nodes = [v for v in nodes if self.node_weights[v] > 0]
+        self.expand([], 0, nodes)
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(node_attrs="weight")
+def max_weight_clique(G, weight="weight"):
+    """Find a maximum weight clique in G.
+
+    A *clique* in a graph is a set of nodes such that every two distinct nodes
+    are adjacent. The *weight* of a clique is the sum of the weights of its
+    nodes. A *maximum weight clique* of graph G is a clique C in G such that
+    no clique in G has weight greater than the weight of C.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+    weight : string or None, optional (default='weight')
+        The node attribute that holds the integer value used as a weight.
+        If None, then each node has weight 1.
+
+    Returns
+    -------
+    clique : list
+        the nodes of a maximum weight clique
+    weight : int
+        the weight of a maximum weight clique
+
+    Notes
+    -----
+    The implementation is recursive, and therefore it may run into recursion
+    depth issues if G contains a clique whose number of nodes is close to the
|
730 |
+
recursion depth limit.
|
731 |
+
|
732 |
+
At each search node, the algorithm greedily constructs a weighted
|
733 |
+
independent set cover of part of the graph in order to find a small set of
|
734 |
+
nodes on which to branch. The algorithm is very similar to the algorithm
|
735 |
+
of Tavares et al. [1]_, other than the fact that the NetworkX version does
|
736 |
+
not use bitsets. This style of algorithm for maximum weight clique (and
|
737 |
+
maximum weight independent set, which is the same problem but on the
|
738 |
+
complement graph) has a decades-long history. See Algorithm B of Warren
|
739 |
+
and Hicks [2]_ and the references in that paper.
|
740 |
+
|
741 |
+
References
|
742 |
+
----------
|
743 |
+
.. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um
|
744 |
+
algoritmo de branch and bound para o problema da clique máxima
|
745 |
+
ponderada. Proceedings of XLVII SBPO 1 (2015).
|
746 |
+
|
747 |
+
.. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound
|
748 |
+
for the Maximum Weight Independent Set Problem. Technical Report,
|
749 |
+
Texas A&M University (2016).
|
750 |
+
"""
|
751 |
+
|
752 |
+
mwc = MaxWeightClique(G, weight)
|
753 |
+
mwc.find_max_weight_clique()
|
754 |
+
return mwc.incumbent_nodes, mwc.incumbent_weight
|
venv/lib/python3.10/site-packages/networkx/algorithms/cluster.py
ADDED
@@ -0,0 +1,609 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Algorithms to characterize the number of triangles in a graph."""
|
2 |
+
|
3 |
+
from collections import Counter
|
4 |
+
from itertools import chain, combinations
|
5 |
+
|
6 |
+
import networkx as nx
|
7 |
+
from networkx.utils import not_implemented_for
|
8 |
+
|
9 |
+
__all__ = [
|
10 |
+
"triangles",
|
11 |
+
"average_clustering",
|
12 |
+
"clustering",
|
13 |
+
"transitivity",
|
14 |
+
"square_clustering",
|
15 |
+
"generalized_degree",
|
16 |
+
]
|
17 |
+
|
18 |
+
|
19 |
+
@not_implemented_for("directed")
|
20 |
+
@nx._dispatchable
|
21 |
+
def triangles(G, nodes=None):
|
22 |
+
"""Compute the number of triangles.
|
23 |
+
|
24 |
+
Finds the number of triangles that include a node as one vertex.
|
25 |
+
|
26 |
+
Parameters
|
27 |
+
----------
|
28 |
+
G : graph
|
29 |
+
A networkx graph
|
30 |
+
|
31 |
+
nodes : node, iterable of nodes, or None (default=None)
|
32 |
+
If a singleton node, return the number of triangles for that node.
|
33 |
+
If an iterable, compute the number of triangles for each of those nodes.
|
34 |
+
If `None` (the default) compute the number of triangles for all nodes in `G`.
|
35 |
+
|
36 |
+
Returns
|
37 |
+
-------
|
38 |
+
out : dict or int
|
39 |
+
If `nodes` is a container of nodes, returns number of triangles keyed by node (dict).
|
40 |
+
If `nodes` is a specific node, returns number of triangles for the node (int).
|
41 |
+
|
42 |
+
Examples
|
43 |
+
--------
|
44 |
+
>>> G = nx.complete_graph(5)
|
45 |
+
>>> print(nx.triangles(G, 0))
|
46 |
+
6
|
47 |
+
>>> print(nx.triangles(G))
|
48 |
+
{0: 6, 1: 6, 2: 6, 3: 6, 4: 6}
|
49 |
+
>>> print(list(nx.triangles(G, [0, 1]).values()))
|
50 |
+
[6, 6]
|
51 |
+
|
52 |
+
Notes
|
53 |
+
-----
|
54 |
+
Self loops are ignored.
|
55 |
+
|
56 |
+
"""
|
57 |
+
if nodes is not None:
|
58 |
+
# If `nodes` represents a single node, return only its number of triangles
|
59 |
+
if nodes in G:
|
60 |
+
return next(_triangles_and_degree_iter(G, nodes))[2] // 2
|
61 |
+
|
62 |
+
# if `nodes` is a container of nodes, then return a
|
63 |
+
# dictionary mapping node to number of triangles.
|
64 |
+
return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)}
|
65 |
+
|
66 |
+
# if nodes is None, then compute triangles for the complete graph
|
67 |
+
|
68 |
+
# dict used to avoid visiting the same nodes twice
|
69 |
+
# this allows calculating/counting each triangle only once
|
70 |
+
later_nbrs = {}
|
71 |
+
|
72 |
+
# iterate over the nodes in a graph
|
73 |
+
for node, neighbors in G.adjacency():
|
74 |
+
later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node}
|
75 |
+
|
76 |
+
# instantiate Counter for each node to include isolated nodes
|
77 |
+
# add 1 to the count if a nodes neighbor's neighbor is also a neighbor
|
78 |
+
triangle_counts = Counter(dict.fromkeys(G, 0))
|
79 |
+
for node1, neighbors in later_nbrs.items():
|
80 |
+
for node2 in neighbors:
|
81 |
+
third_nodes = neighbors & later_nbrs[node2]
|
82 |
+
m = len(third_nodes)
|
83 |
+
triangle_counts[node1] += m
|
84 |
+
triangle_counts[node2] += m
|
85 |
+
triangle_counts.update(third_nodes)
|
86 |
+
|
87 |
+
return dict(triangle_counts)
|
88 |
+
|
89 |
+
|
90 |
+
@not_implemented_for("multigraph")
|
91 |
+
def _triangles_and_degree_iter(G, nodes=None):
|
92 |
+
"""Return an iterator of (node, degree, triangles, generalized degree).
|
93 |
+
|
94 |
+
This double counts triangles so you may want to divide by 2.
|
95 |
+
See degree(), triangles() and generalized_degree() for definitions
|
96 |
+
and details.
|
97 |
+
|
98 |
+
"""
|
99 |
+
if nodes is None:
|
100 |
+
nodes_nbrs = G.adj.items()
|
101 |
+
else:
|
102 |
+
nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes))
|
103 |
+
|
104 |
+
for v, v_nbrs in nodes_nbrs:
|
105 |
+
vs = set(v_nbrs) - {v}
|
106 |
+
gen_degree = Counter(len(vs & (set(G[w]) - {w})) for w in vs)
|
107 |
+
ntriangles = sum(k * val for k, val in gen_degree.items())
|
108 |
+
yield (v, len(vs), ntriangles, gen_degree)
|
109 |
+
|
110 |
+
|
111 |
+
@not_implemented_for("multigraph")
|
112 |
+
def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
|
113 |
+
"""Return an iterator of (node, degree, weighted_triangles).
|
114 |
+
|
115 |
+
Used for weighted clustering.
|
116 |
+
Note: this returns the geometric average weight of edges in the triangle.
|
117 |
+
Also, each triangle is counted twice (each direction).
|
118 |
+
So you may want to divide by 2.
|
119 |
+
|
120 |
+
"""
|
121 |
+
import numpy as np
|
122 |
+
|
123 |
+
if weight is None or G.number_of_edges() == 0:
|
124 |
+
max_weight = 1
|
125 |
+
else:
|
126 |
+
max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))
|
127 |
+
if nodes is None:
|
128 |
+
nodes_nbrs = G.adj.items()
|
129 |
+
else:
|
130 |
+
nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes))
|
131 |
+
|
132 |
+
def wt(u, v):
|
133 |
+
return G[u][v].get(weight, 1) / max_weight
|
134 |
+
|
135 |
+
for i, nbrs in nodes_nbrs:
|
136 |
+
inbrs = set(nbrs) - {i}
|
137 |
+
weighted_triangles = 0
|
138 |
+
seen = set()
|
139 |
+
for j in inbrs:
|
140 |
+
seen.add(j)
|
141 |
+
# This avoids counting twice -- we double at the end.
|
142 |
+
jnbrs = set(G[j]) - seen
|
143 |
+
# Only compute the edge weight once, before the inner inner
|
144 |
+
# loop.
|
145 |
+
wij = wt(i, j)
|
146 |
+
weighted_triangles += np.cbrt(
|
147 |
+
[(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]
|
148 |
+
).sum()
|
149 |
+
yield (i, len(inbrs), 2 * float(weighted_triangles))
|
150 |
+
|
151 |
+
|
152 |
+
@not_implemented_for("multigraph")
|
153 |
+
def _directed_triangles_and_degree_iter(G, nodes=None):
|
154 |
+
"""Return an iterator of
|
155 |
+
(node, total_degree, reciprocal_degree, directed_triangles).
|
156 |
+
|
157 |
+
Used for directed clustering.
|
158 |
+
Note that unlike `_triangles_and_degree_iter()`, this function counts
|
159 |
+
directed triangles so does not count triangles twice.
|
160 |
+
|
161 |
+
"""
|
162 |
+
nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))
|
163 |
+
|
164 |
+
for i, preds, succs in nodes_nbrs:
|
165 |
+
ipreds = set(preds) - {i}
|
166 |
+
isuccs = set(succs) - {i}
|
167 |
+
|
168 |
+
directed_triangles = 0
|
169 |
+
for j in chain(ipreds, isuccs):
|
170 |
+
jpreds = set(G._pred[j]) - {j}
|
171 |
+
jsuccs = set(G._succ[j]) - {j}
|
172 |
+
directed_triangles += sum(
|
173 |
+
1
|
174 |
+
for k in chain(
|
175 |
+
(ipreds & jpreds),
|
176 |
+
(ipreds & jsuccs),
|
177 |
+
(isuccs & jpreds),
|
178 |
+
(isuccs & jsuccs),
|
179 |
+
)
|
180 |
+
)
|
181 |
+
dtotal = len(ipreds) + len(isuccs)
|
182 |
+
dbidirectional = len(ipreds & isuccs)
|
183 |
+
yield (i, dtotal, dbidirectional, directed_triangles)
|
184 |
+
|
185 |
+
|
186 |
+
@not_implemented_for("multigraph")
|
187 |
+
def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
|
188 |
+
"""Return an iterator of
|
189 |
+
(node, total_degree, reciprocal_degree, directed_weighted_triangles).
|
190 |
+
|
191 |
+
Used for directed weighted clustering.
|
192 |
+
Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts
|
193 |
+
directed triangles so does not count triangles twice.
|
194 |
+
|
195 |
+
"""
|
196 |
+
import numpy as np
|
197 |
+
|
198 |
+
if weight is None or G.number_of_edges() == 0:
|
199 |
+
max_weight = 1
|
200 |
+
else:
|
201 |
+
max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))
|
202 |
+
|
203 |
+
nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))
|
204 |
+
|
205 |
+
def wt(u, v):
|
206 |
+
return G[u][v].get(weight, 1) / max_weight
|
207 |
+
|
208 |
+
for i, preds, succs in nodes_nbrs:
|
209 |
+
ipreds = set(preds) - {i}
|
210 |
+
isuccs = set(succs) - {i}
|
211 |
+
|
212 |
+
directed_triangles = 0
|
213 |
+
for j in ipreds:
|
214 |
+
jpreds = set(G._pred[j]) - {j}
|
215 |
+
jsuccs = set(G._succ[j]) - {j}
|
216 |
+
directed_triangles += np.cbrt(
|
217 |
+
[(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
|
218 |
+
).sum()
|
219 |
+
directed_triangles += np.cbrt(
|
220 |
+
[(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
|
221 |
+
).sum()
|
222 |
+
directed_triangles += np.cbrt(
|
223 |
+
[(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
|
224 |
+
).sum()
|
225 |
+
directed_triangles += np.cbrt(
|
226 |
+
[(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
|
227 |
+
).sum()
|
228 |
+
|
229 |
+
for j in isuccs:
|
230 |
+
jpreds = set(G._pred[j]) - {j}
|
231 |
+
jsuccs = set(G._succ[j]) - {j}
|
232 |
+
directed_triangles += np.cbrt(
|
233 |
+
[(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
|
234 |
+
).sum()
|
235 |
+
directed_triangles += np.cbrt(
|
236 |
+
[(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
|
237 |
+
).sum()
|
238 |
+
directed_triangles += np.cbrt(
|
239 |
+
[(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
|
240 |
+
).sum()
|
241 |
+
directed_triangles += np.cbrt(
|
242 |
+
[(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
|
243 |
+
).sum()
|
244 |
+
|
245 |
+
dtotal = len(ipreds) + len(isuccs)
|
246 |
+
dbidirectional = len(ipreds & isuccs)
|
247 |
+
yield (i, dtotal, dbidirectional, float(directed_triangles))
|
248 |
+
|
249 |
+
|
250 |
+
@nx._dispatchable(edge_attrs="weight")
|
251 |
+
def average_clustering(G, nodes=None, weight=None, count_zeros=True):
|
252 |
+
r"""Compute the average clustering coefficient for the graph G.
|
253 |
+
|
254 |
+
The clustering coefficient for the graph is the average,
|
255 |
+
|
256 |
+
.. math::
|
257 |
+
|
258 |
+
C = \frac{1}{n}\sum_{v \in G} c_v,
|
259 |
+
|
260 |
+
where :math:`n` is the number of nodes in `G`.
|
261 |
+
|
262 |
+
Parameters
|
263 |
+
----------
|
264 |
+
G : graph
|
265 |
+
|
266 |
+
nodes : container of nodes, optional (default=all nodes in G)
|
267 |
+
Compute average clustering for nodes in this container.
|
268 |
+
|
269 |
+
weight : string or None, optional (default=None)
|
270 |
+
The edge attribute that holds the numerical value used as a weight.
|
271 |
+
If None, then each edge has weight 1.
|
272 |
+
|
273 |
+
count_zeros : bool
|
274 |
+
If False include only the nodes with nonzero clustering in the average.
|
275 |
+
|
276 |
+
Returns
|
277 |
+
-------
|
278 |
+
avg : float
|
279 |
+
Average clustering
|
280 |
+
|
281 |
+
Examples
|
282 |
+
--------
|
283 |
+
>>> G = nx.complete_graph(5)
|
284 |
+
>>> print(nx.average_clustering(G))
|
285 |
+
1.0
|
286 |
+
|
287 |
+
Notes
|
288 |
+
-----
|
289 |
+
This is a space saving routine; it might be faster
|
290 |
+
to use the clustering function to get a list and then take the average.
|
291 |
+
|
292 |
+
Self loops are ignored.
|
293 |
+
|
294 |
+
References
|
295 |
+
----------
|
296 |
+
.. [1] Generalizations of the clustering coefficient to weighted
|
297 |
+
complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
|
298 |
+
K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
|
299 |
+
http://jponnela.com/web_documents/a9.pdf
|
300 |
+
.. [2] Marcus Kaiser, Mean clustering coefficients: the role of isolated
|
301 |
+
nodes and leafs on clustering measures for small-world networks.
|
302 |
+
https://arxiv.org/abs/0802.2512
|
303 |
+
"""
|
304 |
+
c = clustering(G, nodes, weight=weight).values()
|
305 |
+
if not count_zeros:
|
306 |
+
c = [v for v in c if abs(v) > 0]
|
307 |
+
return sum(c) / len(c)
|
308 |
+
|
309 |
+
|
310 |
+
@nx._dispatchable(edge_attrs="weight")
|
311 |
+
def clustering(G, nodes=None, weight=None):
|
312 |
+
r"""Compute the clustering coefficient for nodes.
|
313 |
+
|
314 |
+
For unweighted graphs, the clustering of a node :math:`u`
|
315 |
+
is the fraction of possible triangles through that node that exist,
|
316 |
+
|
317 |
+
.. math::
|
318 |
+
|
319 |
+
c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},
|
320 |
+
|
321 |
+
where :math:`T(u)` is the number of triangles through node :math:`u` and
|
322 |
+
:math:`deg(u)` is the degree of :math:`u`.
|
323 |
+
|
324 |
+
For weighted graphs, there are several ways to define clustering [1]_.
|
325 |
+
the one used here is defined
|
326 |
+
as the geometric average of the subgraph edge weights [2]_,
|
327 |
+
|
328 |
+
.. math::
|
329 |
+
|
330 |
+
c_u = \frac{1}{deg(u)(deg(u)-1))}
|
331 |
+
\sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}.
|
332 |
+
|
333 |
+
The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight
|
334 |
+
in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.
|
335 |
+
|
336 |
+
The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.
|
337 |
+
|
338 |
+
Additionally, this weighted definition has been generalized to support negative edge weights [3]_.
|
339 |
+
|
340 |
+
For directed graphs, the clustering is similarly defined as the fraction
|
341 |
+
of all possible directed triangles or geometric average of the subgraph
|
342 |
+
edge weights for unweighted and weighted directed graph respectively [4]_.
|
343 |
+
|
344 |
+
.. math::
|
345 |
+
|
346 |
+
c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},
|
347 |
+
|
348 |
+
where :math:`T(u)` is the number of directed triangles through node
|
349 |
+
:math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
|
350 |
+
:math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of
|
351 |
+
:math:`u`.
|
352 |
+
|
353 |
+
|
354 |
+
Parameters
|
355 |
+
----------
|
356 |
+
G : graph
|
357 |
+
|
358 |
+
nodes : node, iterable of nodes, or None (default=None)
|
359 |
+
If a singleton node, return the number of triangles for that node.
|
360 |
+
If an iterable, compute the number of triangles for each of those nodes.
|
361 |
+
If `None` (the default) compute the number of triangles for all nodes in `G`.
|
362 |
+
|
363 |
+
weight : string or None, optional (default=None)
|
364 |
+
The edge attribute that holds the numerical value used as a weight.
|
365 |
+
If None, then each edge has weight 1.
|
366 |
+
|
367 |
+
Returns
|
368 |
+
-------
|
369 |
+
out : float, or dictionary
|
370 |
+
Clustering coefficient at specified nodes
|
371 |
+
|
372 |
+
Examples
|
373 |
+
--------
|
374 |
+
>>> G = nx.complete_graph(5)
|
375 |
+
>>> print(nx.clustering(G, 0))
|
376 |
+
1.0
|
377 |
+
>>> print(nx.clustering(G))
|
378 |
+
{0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
|
379 |
+
|
380 |
+
Notes
|
381 |
+
-----
|
382 |
+
Self loops are ignored.
|
383 |
+
|
384 |
+
References
|
385 |
+
----------
|
386 |
+
.. [1] Generalizations of the clustering coefficient to weighted
|
387 |
+
complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
|
388 |
+
K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
|
389 |
+
http://jponnela.com/web_documents/a9.pdf
|
390 |
+
.. [2] Intensity and coherence of motifs in weighted complex
|
391 |
+
networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
|
392 |
+
Physical Review E, 71(6), 065103 (2005).
|
393 |
+
.. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
|
394 |
+
by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
|
395 |
+
.. [4] Clustering in complex directed networks by G. Fagiolo,
|
396 |
+
Physical Review E, 76(2), 026107 (2007).
|
397 |
+
"""
|
398 |
+
if G.is_directed():
|
399 |
+
if weight is not None:
|
400 |
+
td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight)
|
401 |
+
clusterc = {
|
402 |
+
v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
|
403 |
+
for v, dt, db, t in td_iter
|
404 |
+
}
|
405 |
+
else:
|
406 |
+
td_iter = _directed_triangles_and_degree_iter(G, nodes)
|
407 |
+
clusterc = {
|
408 |
+
v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
|
409 |
+
for v, dt, db, t in td_iter
|
410 |
+
}
|
411 |
+
else:
|
412 |
+
# The formula 2*T/(d*(d-1)) from docs is t/(d*(d-1)) here b/c t==2*T
|
413 |
+
if weight is not None:
|
414 |
+
td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight)
|
415 |
+
clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter}
|
416 |
+
else:
|
417 |
+
td_iter = _triangles_and_degree_iter(G, nodes)
|
418 |
+
clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter}
|
419 |
+
if nodes in G:
|
420 |
+
# Return the value of the sole entry in the dictionary.
|
421 |
+
return clusterc[nodes]
|
422 |
+
return clusterc
|
423 |
+
|
424 |
+
|
425 |
+
@nx._dispatchable
|
426 |
+
def transitivity(G):
|
427 |
+
r"""Compute graph transitivity, the fraction of all possible triangles
|
428 |
+
present in G.
|
429 |
+
|
430 |
+
Possible triangles are identified by the number of "triads"
|
431 |
+
(two edges with a shared vertex).
|
432 |
+
|
433 |
+
The transitivity is
|
434 |
+
|
435 |
+
.. math::
|
436 |
+
|
437 |
+
T = 3\frac{\#triangles}{\#triads}.
|
438 |
+
|
439 |
+
Parameters
|
440 |
+
----------
|
441 |
+
G : graph
|
442 |
+
|
443 |
+
Returns
|
444 |
+
-------
|
445 |
+
out : float
|
446 |
+
Transitivity
|
447 |
+
|
448 |
+
Notes
|
449 |
+
-----
|
450 |
+
Self loops are ignored.
|
451 |
+
|
452 |
+
Examples
|
453 |
+
--------
|
454 |
+
>>> G = nx.complete_graph(5)
|
455 |
+
>>> print(nx.transitivity(G))
|
456 |
+
1.0
|
457 |
+
"""
|
458 |
+
triangles_contri = [
|
459 |
+
(t, d * (d - 1)) for v, d, t, _ in _triangles_and_degree_iter(G)
|
460 |
+
]
|
461 |
+
# If the graph is empty
|
462 |
+
if len(triangles_contri) == 0:
|
463 |
+
return 0
|
464 |
+
triangles, contri = map(sum, zip(*triangles_contri))
|
465 |
+
return 0 if triangles == 0 else triangles / contri
|
466 |
+
|
467 |
+
|
468 |
+
@nx._dispatchable
|
469 |
+
def square_clustering(G, nodes=None):
|
470 |
+
r"""Compute the squares clustering coefficient for nodes.
|
471 |
+
|
472 |
+
For each node return the fraction of possible squares that exist at
|
473 |
+
the node [1]_
|
474 |
+
|
475 |
+
.. math::
|
476 |
+
C_4(v) = \frac{ \sum_{u=1}^{k_v}
|
477 |
+
\sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
|
478 |
+
\sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},
|
479 |
+
|
480 |
+
where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
|
481 |
+
:math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u -
|
482 |
+
(1+q_v(u,w)+\theta_{uv})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
|
483 |
+
:math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
|
484 |
+
otherwise. [2]_
|
485 |
+
|
486 |
+
Parameters
|
487 |
+
----------
|
488 |
+
G : graph
|
489 |
+
|
490 |
+
nodes : container of nodes, optional (default=all nodes in G)
|
491 |
+
Compute clustering for nodes in this container.
|
492 |
+
|
493 |
+
Returns
|
494 |
+
-------
|
495 |
+
c4 : dictionary
|
496 |
+
A dictionary keyed by node with the square clustering coefficient value.
|
497 |
+
|
498 |
+
Examples
|
499 |
+
--------
|
500 |
+
>>> G = nx.complete_graph(5)
|
501 |
+
>>> print(nx.square_clustering(G, 0))
|
502 |
+
1.0
|
503 |
+
>>> print(nx.square_clustering(G))
|
504 |
+
{0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
|
505 |
+
|
506 |
+
Notes
|
507 |
+
-----
|
508 |
+
While :math:`C_3(v)` (triangle clustering) gives the probability that
|
509 |
+
two neighbors of node v are connected with each other, :math:`C_4(v)` is
|
510 |
+
the probability that two neighbors of node v share a common
|
511 |
+
neighbor different from v. This algorithm can be applied to both
|
512 |
+
bipartite and unipartite networks.
|
513 |
+
|
514 |
+
References
|
515 |
+
----------
|
516 |
+
.. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005
|
517 |
+
Cycles and clustering in bipartite networks.
|
518 |
+
Physical Review E (72) 056127.
|
519 |
+
.. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of
|
520 |
+
Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875.
|
521 |
+
https://arxiv.org/abs/0710.0117v1
|
522 |
+
"""
|
523 |
+
if nodes is None:
|
524 |
+
node_iter = G
|
525 |
+
else:
|
526 |
+
node_iter = G.nbunch_iter(nodes)
|
527 |
+
clustering = {}
|
528 |
+
for v in node_iter:
|
529 |
+
clustering[v] = 0
|
530 |
+
potential = 0
|
531 |
+
for u, w in combinations(G[v], 2):
|
532 |
+
squares = len((set(G[u]) & set(G[w])) - {v})
|
533 |
+
clustering[v] += squares
|
534 |
+
degm = squares + 1
|
535 |
+
if w in G[u]:
|
536 |
+
degm += 1
|
537 |
+
potential += (len(G[u]) - degm) + (len(G[w]) - degm) + squares
|
538 |
+
if potential > 0:
|
539 |
+
clustering[v] /= potential
|
540 |
+
if nodes in G:
|
541 |
+
# Return the value of the sole entry in the dictionary.
|
542 |
+
return clustering[nodes]
|
543 |
+
return clustering
|
544 |
+
|
545 |
+
|
546 |
+
@not_implemented_for("directed")
|
547 |
+
@nx._dispatchable
|
548 |
+
def generalized_degree(G, nodes=None):
|
549 |
+
r"""Compute the generalized degree for nodes.
|
550 |
+
|
551 |
+
For each node, the generalized degree shows how many edges of given
|
552 |
+
triangle multiplicity the node is connected to. The triangle multiplicity
|
553 |
+
of an edge is the number of triangles an edge participates in. The
|
554 |
+
generalized degree of node :math:`i` can be written as a vector
|
555 |
+
:math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where
|
556 |
+
:math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that
|
557 |
+
participate in :math:`j` triangles.
|
558 |
+
|
559 |
+
Parameters
|
560 |
+
----------
|
561 |
+
G : graph
|
562 |
+
|
563 |
+
nodes : container of nodes, optional (default=all nodes in G)
|
564 |
+
Compute the generalized degree for nodes in this container.
|
565 |
+
|
566 |
+
Returns
|
567 |
+
-------
|
568 |
+
out : Counter, or dictionary of Counters
|
569 |
+
Generalized degree of specified nodes. The Counter is keyed by edge
|
570 |
+
triangle multiplicity.
|
571 |
+
|
572 |
+
Examples
|
573 |
+
--------
|
574 |
+
>>> G = nx.complete_graph(5)
|
575 |
+
>>> print(nx.generalized_degree(G, 0))
|
576 |
+
Counter({3: 4})
|
577 |
+
>>> print(nx.generalized_degree(G))
|
578 |
+
{0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})}
|
579 |
+
|
580 |
+
To recover the number of triangles attached to a node:
|
581 |
+
|
582 |
+
>>> k1 = nx.generalized_degree(G, 0)
|
583 |
+
>>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0)
|
584 |
+
True
|
585 |
+
|
586 |
+
Notes
|
587 |
+
-----
|
588 |
+
Self loops are ignored.
|
589 |
+
|
590 |
+
In a network of N nodes, the highest triangle multiplicity an edge can have
|
591 |
+
is N-2.
|
592 |
+
|
593 |
+
The return value does not include a `zero` entry if no edges of a
|
594 |
+
particular triangle multiplicity are present.
|
595 |
+
|
596 |
+
The number of triangles node :math:`i` is attached to can be recovered from
|
597 |
+
the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc,
|
598 |
+
k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`.
|
599 |
+
|
600 |
+
References
|
601 |
+
----------
|
602 |
+
.. [1] Networks with arbitrary edge multiplicities by V. Zlatić,
|
603 |
+
D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters),
|
604 |
+
Volume 97, Number 2 (2012).
|
605 |
+
https://iopscience.iop.org/article/10.1209/0295-5075/97/28005
|
606 |
+
"""
|
607 |
+
if nodes in G:
|
608 |
+
return next(_triangles_and_degree_iter(G, nodes))[3]
|
609 |
+
return {v: gd for v, d, t, gd in _triangles_and_degree_iter(G, nodes)}
|
venv/lib/python3.10/site-packages/networkx/algorithms/core.py
ADDED
@@ -0,0 +1,648 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Find the k-cores of a graph.
|
3 |
+
|
4 |
+
The k-core is found by recursively pruning nodes with degrees less than k.
|
5 |
+
|
6 |
+
See the following references for details:
|
7 |
+
|
8 |
+
An O(m) Algorithm for Cores Decomposition of Networks
|
9 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2003.
|
10 |
+
https://arxiv.org/abs/cs.DS/0310049
|
11 |
+
|
12 |
+
Generalized Cores
|
13 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2002.
|
14 |
+
https://arxiv.org/pdf/cs/0202039
|
15 |
+
|
16 |
+
For directed graphs a more general notion is that of D-cores which
|
17 |
+
looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
|
18 |
+
is the k-core.
|
19 |
+
|
20 |
+
D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
|
21 |
+
Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
|
22 |
+
http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
|
23 |
+
|
24 |
+
Multi-scale structure and topological anomaly detection via a new network \
|
25 |
+
statistic: The onion decomposition
|
26 |
+
L. Hébert-Dufresne, J. A. Grochow, and A. Allard
|
27 |
+
Scientific Reports 6, 31708 (2016)
|
28 |
+
http://doi.org/10.1038/srep31708
|
29 |
+
|
30 |
+
"""
|
31 |
+
import networkx as nx
|
32 |
+
|
33 |
+
__all__ = [
|
34 |
+
"core_number",
|
35 |
+
"k_core",
|
36 |
+
"k_shell",
|
37 |
+
"k_crust",
|
38 |
+
"k_corona",
|
39 |
+
"k_truss",
|
40 |
+
"onion_layers",
|
41 |
+
]
|
42 |
+
|
43 |
+
|
44 |
+
@nx.utils.not_implemented_for("multigraph")
|
45 |
+
@nx._dispatchable
|
46 |
+
def core_number(G):
|
47 |
+
"""Returns the core number for each node.
|
48 |
+
|
49 |
+
A k-core is a maximal subgraph that contains nodes of degree k or more.
|
50 |
+
|
51 |
+
The core number of a node is the largest value k of a k-core containing
|
52 |
+
that node.
|
53 |
+
|
54 |
+
Parameters
|
55 |
+
----------
|
56 |
+
G : NetworkX graph
|
57 |
+
An undirected or directed graph
|
58 |
+
|
59 |
+
Returns
|
60 |
+
-------
|
61 |
+
core_number : dictionary
|
62 |
+
A dictionary keyed by node to the core number.
|
63 |
+
|
64 |
+
Raises
|
65 |
+
------
|
66 |
+
NetworkXNotImplemented
|
67 |
+
If `G` is a multigraph or contains self loops.
|
68 |
+
|
69 |
+
Notes
|
70 |
+
-----
|
71 |
+
For directed graphs the node degree is defined to be the
|
72 |
+
in-degree + out-degree.
|
73 |
+
|
74 |
+
Examples
|
75 |
+
--------
|
76 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
77 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
78 |
+
>>> nx.core_number(H)
|
79 |
+
{0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0}
|
80 |
+
>>> G = nx.DiGraph()
|
81 |
+
>>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])
|
82 |
+
>>> nx.core_number(G)
|
83 |
+
{1: 2, 2: 2, 3: 2, 4: 2}
|
84 |
+
|
85 |
+
References
|
86 |
+
----------
|
87 |
+
.. [1] An O(m) Algorithm for Cores Decomposition of Networks
|
88 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2003.
|
89 |
+
https://arxiv.org/abs/cs.DS/0310049
|
90 |
+
"""
|
91 |
+
if nx.number_of_selfloops(G) > 0:
|
92 |
+
msg = (
|
93 |
+
"Input graph has self loops which is not permitted; "
|
94 |
+
"Consider using G.remove_edges_from(nx.selfloop_edges(G))."
|
95 |
+
)
|
96 |
+
raise nx.NetworkXNotImplemented(msg)
|
97 |
+
degrees = dict(G.degree())
|
98 |
+
# Sort nodes by degree.
|
99 |
+
nodes = sorted(degrees, key=degrees.get)
|
100 |
+
bin_boundaries = [0]
|
101 |
+
curr_degree = 0
|
102 |
+
for i, v in enumerate(nodes):
|
103 |
+
if degrees[v] > curr_degree:
|
104 |
+
bin_boundaries.extend([i] * (degrees[v] - curr_degree))
|
105 |
+
curr_degree = degrees[v]
|
106 |
+
node_pos = {v: pos for pos, v in enumerate(nodes)}
|
107 |
+
# The initial guess for the core number of a node is its degree.
|
108 |
+
core = degrees
|
109 |
+
nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
|
110 |
+
for v in nodes:
|
111 |
+
for u in nbrs[v]:
|
112 |
+
if core[u] > core[v]:
|
113 |
+
nbrs[u].remove(v)
|
114 |
+
pos = node_pos[u]
|
115 |
+
bin_start = bin_boundaries[core[u]]
|
116 |
+
node_pos[u] = bin_start
|
117 |
+
node_pos[nodes[bin_start]] = pos
|
118 |
+
nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
|
119 |
+
bin_boundaries[core[u]] += 1
|
120 |
+
core[u] -= 1
|
121 |
+
return core
|
122 |
+
|
123 |
+
|
124 |
+
def _core_subgraph(G, k_filter, k=None, core=None):
|
125 |
+
"""Returns the subgraph induced by nodes passing filter `k_filter`.
|
126 |
+
|
127 |
+
Parameters
|
128 |
+
----------
|
129 |
+
G : NetworkX graph
|
130 |
+
The graph or directed graph to process
|
131 |
+
k_filter : filter function
|
132 |
+
This function filters the nodes chosen. It takes three inputs:
|
133 |
+
A node of G, the filter's cutoff, and the core dict of the graph.
|
134 |
+
The function should return a Boolean value.
|
135 |
+
k : int, optional
|
136 |
+
The order of the core. If not specified use the max core number.
|
137 |
+
This value is used as the cutoff for the filter.
|
138 |
+
core : dict, optional
|
139 |
+
Precomputed core numbers keyed by node for the graph `G`.
|
140 |
+
If not specified, the core numbers will be computed from `G`.
|
141 |
+
|
142 |
+
"""
|
143 |
+
if core is None:
|
144 |
+
core = core_number(G)
|
145 |
+
if k is None:
|
146 |
+
k = max(core.values())
|
147 |
+
nodes = (v for v in core if k_filter(v, k, core))
|
148 |
+
return G.subgraph(nodes).copy()
|
149 |
+
|
150 |
+
|
151 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
152 |
+
def k_core(G, k=None, core_number=None):
|
153 |
+
"""Returns the k-core of G.
|
154 |
+
|
155 |
+
A k-core is a maximal subgraph that contains nodes of degree `k` or more.
|
156 |
+
|
157 |
+
.. deprecated:: 3.3
|
158 |
+
`k_core` will not accept `MultiGraph` objects in version 3.5.
|
159 |
+
|
160 |
+
Parameters
|
161 |
+
----------
|
162 |
+
G : NetworkX graph
|
163 |
+
A graph or directed graph
|
164 |
+
k : int, optional
|
165 |
+
The order of the core. If not specified return the main core.
|
166 |
+
core_number : dictionary, optional
|
167 |
+
Precomputed core numbers for the graph G.
|
168 |
+
|
169 |
+
Returns
|
170 |
+
-------
|
171 |
+
G : NetworkX graph
|
172 |
+
The k-core subgraph
|
173 |
+
|
174 |
+
Raises
|
175 |
+
------
|
176 |
+
NetworkXNotImplemented
|
177 |
+
The k-core is not defined for multigraphs or graphs with self loops.
|
178 |
+
|
179 |
+
Notes
|
180 |
+
-----
|
181 |
+
The main core is the core with `k` as the largest core_number.
|
182 |
+
|
183 |
+
For directed graphs the node degree is defined to be the
|
184 |
+
in-degree + out-degree.
|
185 |
+
|
186 |
+
Graph, node, and edge attributes are copied to the subgraph.
|
187 |
+
|
188 |
+
Examples
|
189 |
+
--------
|
190 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
191 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
192 |
+
>>> H.degree
|
193 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
194 |
+
>>> nx.k_core(H).nodes
|
195 |
+
NodeView((1, 2, 3, 5))
|
196 |
+
|
197 |
+
See Also
|
198 |
+
--------
|
199 |
+
core_number
|
200 |
+
|
201 |
+
References
|
202 |
+
----------
|
203 |
+
.. [1] An O(m) Algorithm for Cores Decomposition of Networks
|
204 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2003.
|
205 |
+
https://arxiv.org/abs/cs.DS/0310049
|
206 |
+
"""
|
207 |
+
|
208 |
+
import warnings
|
209 |
+
|
210 |
+
if G.is_multigraph():
|
211 |
+
warnings.warn(
|
212 |
+
(
|
213 |
+
"\n\n`k_core` will not accept `MultiGraph` objects in version 3.5.\n"
|
214 |
+
"Convert it to an undirected graph instead, using::\n\n"
|
215 |
+
"\tG = nx.Graph(G)\n"
|
216 |
+
),
|
217 |
+
category=DeprecationWarning,
|
218 |
+
stacklevel=5,
|
219 |
+
)
|
220 |
+
|
221 |
+
def k_filter(v, k, c):
|
222 |
+
return c[v] >= k
|
223 |
+
|
224 |
+
return _core_subgraph(G, k_filter, k, core_number)
|
225 |
+
|
226 |
+
|
227 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
228 |
+
def k_shell(G, k=None, core_number=None):
|
229 |
+
"""Returns the k-shell of G.
|
230 |
+
|
231 |
+
The k-shell is the subgraph induced by nodes with core number k.
|
232 |
+
That is, nodes in the k-core that are not in the (k+1)-core.
|
233 |
+
|
234 |
+
.. deprecated:: 3.3
|
235 |
+
`k_shell` will not accept `MultiGraph` objects in version 3.5.
|
236 |
+
|
237 |
+
Parameters
|
238 |
+
----------
|
239 |
+
G : NetworkX graph
|
240 |
+
A graph or directed graph.
|
241 |
+
k : int, optional
|
242 |
+
The order of the shell. If not specified return the outer shell.
|
243 |
+
core_number : dictionary, optional
|
244 |
+
Precomputed core numbers for the graph G.
|
245 |
+
|
246 |
+
|
247 |
+
Returns
|
248 |
+
-------
|
249 |
+
G : NetworkX graph
|
250 |
+
The k-shell subgraph
|
251 |
+
|
252 |
+
Raises
|
253 |
+
------
|
254 |
+
NetworkXNotImplemented
|
255 |
+
The k-shell is not implemented for multigraphs or graphs with self loops.
|
256 |
+
|
257 |
+
Notes
|
258 |
+
-----
|
259 |
+
This is similar to k_corona but in that case only neighbors in the
|
260 |
+
k-core are considered.
|
261 |
+
|
262 |
+
For directed graphs the node degree is defined to be the
|
263 |
+
in-degree + out-degree.
|
264 |
+
|
265 |
+
Graph, node, and edge attributes are copied to the subgraph.
|
266 |
+
|
267 |
+
Examples
|
268 |
+
--------
|
269 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
270 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
271 |
+
>>> H.degree
|
272 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
273 |
+
>>> nx.k_shell(H, k=1).nodes
|
274 |
+
NodeView((0, 4))
|
275 |
+
|
276 |
+
See Also
|
277 |
+
--------
|
278 |
+
core_number
|
279 |
+
k_corona
|
280 |
+
|
281 |
+
|
282 |
+
References
|
283 |
+
----------
|
284 |
+
.. [1] A model of Internet topology using k-shell decomposition
|
285 |
+
Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
|
286 |
+
and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
|
287 |
+
http://www.pnas.org/content/104/27/11150.full
|
288 |
+
"""
|
289 |
+
|
290 |
+
import warnings
|
291 |
+
|
292 |
+
if G.is_multigraph():
|
293 |
+
warnings.warn(
|
294 |
+
(
|
295 |
+
"\n\n`k_shell` will not accept `MultiGraph` objects in version 3.5.\n"
|
296 |
+
"Convert it to an undirected graph instead, using::\n\n"
|
297 |
+
"\tG = nx.Graph(G)\n"
|
298 |
+
),
|
299 |
+
category=DeprecationWarning,
|
300 |
+
stacklevel=5,
|
301 |
+
)
|
302 |
+
|
303 |
+
def k_filter(v, k, c):
|
304 |
+
return c[v] == k
|
305 |
+
|
306 |
+
return _core_subgraph(G, k_filter, k, core_number)
|
307 |
+
|
308 |
+
|
309 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
310 |
+
def k_crust(G, k=None, core_number=None):
|
311 |
+
"""Returns the k-crust of G.
|
312 |
+
|
313 |
+
The k-crust is the graph G with the edges of the k-core removed
|
314 |
+
and isolated nodes found after the removal of edges are also removed.
|
315 |
+
|
316 |
+
.. deprecated:: 3.3
|
317 |
+
`k_crust` will not accept `MultiGraph` objects in version 3.5.
|
318 |
+
|
319 |
+
Parameters
|
320 |
+
----------
|
321 |
+
G : NetworkX graph
|
322 |
+
A graph or directed graph.
|
323 |
+
k : int, optional
|
324 |
+
The order of the shell. If not specified return the main crust.
|
325 |
+
core_number : dictionary, optional
|
326 |
+
Precomputed core numbers for the graph G.
|
327 |
+
|
328 |
+
Returns
|
329 |
+
-------
|
330 |
+
G : NetworkX graph
|
331 |
+
The k-crust subgraph
|
332 |
+
|
333 |
+
Raises
|
334 |
+
------
|
335 |
+
NetworkXNotImplemented
|
336 |
+
The k-crust is not implemented for multigraphs or graphs with self loops.
|
337 |
+
|
338 |
+
Notes
|
339 |
+
-----
|
340 |
+
This definition of k-crust is different than the definition in [1]_.
|
341 |
+
The k-crust in [1]_ is equivalent to the k+1 crust of this algorithm.
|
342 |
+
|
343 |
+
For directed graphs the node degree is defined to be the
|
344 |
+
in-degree + out-degree.
|
345 |
+
|
346 |
+
Graph, node, and edge attributes are copied to the subgraph.
|
347 |
+
|
348 |
+
Examples
|
349 |
+
--------
|
350 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
351 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
352 |
+
>>> H.degree
|
353 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
354 |
+
>>> nx.k_crust(H, k=1).nodes
|
355 |
+
NodeView((0, 4, 6))
|
356 |
+
|
357 |
+
See Also
|
358 |
+
--------
|
359 |
+
core_number
|
360 |
+
|
361 |
+
References
|
362 |
+
----------
|
363 |
+
.. [1] A model of Internet topology using k-shell decomposition
|
364 |
+
Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
|
365 |
+
and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
|
366 |
+
http://www.pnas.org/content/104/27/11150.full
|
367 |
+
"""
|
368 |
+
|
369 |
+
import warnings
|
370 |
+
|
371 |
+
if G.is_multigraph():
|
372 |
+
warnings.warn(
|
373 |
+
(
|
374 |
+
"\n\n`k_crust` will not accept `MultiGraph` objects in version 3.5.\n"
|
375 |
+
"Convert it to an undirected graph instead, using::\n\n"
|
376 |
+
"\tG = nx.Graph(G)\n"
|
377 |
+
),
|
378 |
+
category=DeprecationWarning,
|
379 |
+
stacklevel=5,
|
380 |
+
)
|
381 |
+
|
382 |
+
# Default for k is one less than in _core_subgraph, so just inline.
|
383 |
+
# Filter is c[v] <= k
|
384 |
+
if core_number is None:
|
385 |
+
core_number = nx.core_number(G)
|
386 |
+
if k is None:
|
387 |
+
k = max(core_number.values()) - 1
|
388 |
+
nodes = (v for v in core_number if core_number[v] <= k)
|
389 |
+
return G.subgraph(nodes).copy()
|
390 |
+
|
391 |
+
|
392 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
393 |
+
def k_corona(G, k, core_number=None):
|
394 |
+
"""Returns the k-corona of G.
|
395 |
+
|
396 |
+
The k-corona is the subgraph of nodes in the k-core which have
|
397 |
+
exactly k neighbors in the k-core.
|
398 |
+
|
399 |
+
.. deprecated:: 3.3
|
400 |
+
`k_corona` will not accept `MultiGraph` objects in version 3.5.
|
401 |
+
|
402 |
+
Parameters
|
403 |
+
----------
|
404 |
+
G : NetworkX graph
|
405 |
+
A graph or directed graph
|
406 |
+
k : int
|
407 |
+
The order of the corona.
|
408 |
+
core_number : dictionary, optional
|
409 |
+
Precomputed core numbers for the graph G.
|
410 |
+
|
411 |
+
Returns
|
412 |
+
-------
|
413 |
+
G : NetworkX graph
|
414 |
+
The k-corona subgraph
|
415 |
+
|
416 |
+
Raises
|
417 |
+
------
|
418 |
+
NetworkXNotImplemented
|
419 |
+
The k-corona is not defined for multigraphs or graphs with self loops.
|
420 |
+
|
421 |
+
Notes
|
422 |
+
-----
|
423 |
+
For directed graphs the node degree is defined to be the
|
424 |
+
in-degree + out-degree.
|
425 |
+
|
426 |
+
Graph, node, and edge attributes are copied to the subgraph.
|
427 |
+
|
428 |
+
Examples
|
429 |
+
--------
|
430 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
431 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
432 |
+
>>> H.degree
|
433 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
434 |
+
>>> nx.k_corona(H, k=2).nodes
|
435 |
+
NodeView((1, 2, 3, 5))
|
436 |
+
|
437 |
+
See Also
|
438 |
+
--------
|
439 |
+
core_number
|
440 |
+
|
441 |
+
References
|
442 |
+
----------
|
443 |
+
.. [1] k -core (bootstrap) percolation on complex networks:
|
444 |
+
Critical phenomena and nonlocal effects,
|
445 |
+
A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
|
446 |
+
Phys. Rev. E 73, 056101 (2006)
|
447 |
+
http://link.aps.org/doi/10.1103/PhysRevE.73.056101
|
448 |
+
"""
|
449 |
+
|
450 |
+
import warnings
|
451 |
+
|
452 |
+
if G.is_multigraph():
|
453 |
+
warnings.warn(
|
454 |
+
(
|
455 |
+
"\n\n`k_corona` will not accept `MultiGraph` objects in version 3.5.\n"
|
456 |
+
"Convert it to an undirected graph instead, using::\n\n"
|
457 |
+
"\tG = nx.Graph(G)\n"
|
458 |
+
),
|
459 |
+
category=DeprecationWarning,
|
460 |
+
stacklevel=5,
|
461 |
+
)
|
462 |
+
|
463 |
+
def func(v, k, c):
|
464 |
+
return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k)
|
465 |
+
|
466 |
+
return _core_subgraph(G, func, k, core_number)
|
467 |
+
|
468 |
+
|
469 |
+
@nx.utils.not_implemented_for("directed")
|
470 |
+
@nx.utils.not_implemented_for("multigraph")
|
471 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
472 |
+
def k_truss(G, k):
|
473 |
+
"""Returns the k-truss of `G`.
|
474 |
+
|
475 |
+
The k-truss is the maximal induced subgraph of `G` which contains at least
|
476 |
+
three vertices where every edge is incident to at least `k-2` triangles.
|
477 |
+
|
478 |
+
Parameters
|
479 |
+
----------
|
480 |
+
G : NetworkX graph
|
481 |
+
An undirected graph
|
482 |
+
k : int
|
483 |
+
The order of the truss
|
484 |
+
|
485 |
+
Returns
|
486 |
+
-------
|
487 |
+
H : NetworkX graph
|
488 |
+
The k-truss subgraph
|
489 |
+
|
490 |
+
Raises
|
491 |
+
------
|
492 |
+
NetworkXNotImplemented
|
493 |
+
If `G` is a multigraph or directed graph or if it contains self loops.
|
494 |
+
|
495 |
+
Notes
|
496 |
+
-----
|
497 |
+
A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.
|
498 |
+
|
499 |
+
Graph, node, and edge attributes are copied to the subgraph.
|
500 |
+
|
501 |
+
K-trusses were originally defined in [2] which states that the k-truss
|
502 |
+
is the maximal induced subgraph where each edge belongs to at least
|
503 |
+
`k-2` triangles. A more recent paper, [1], uses a slightly different
|
504 |
+
definition requiring that each edge belong to at least `k` triangles.
|
505 |
+
This implementation uses the original definition of `k-2` triangles.
|
506 |
+
|
507 |
+
Examples
|
508 |
+
--------
|
509 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
510 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
511 |
+
>>> H.degree
|
512 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
513 |
+
>>> nx.k_truss(H, k=2).nodes
|
514 |
+
NodeView((0, 1, 2, 3, 4, 5))
|
515 |
+
|
516 |
+
References
|
517 |
+
----------
|
518 |
+
.. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
|
519 |
+
David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
|
520 |
+
.. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
|
521 |
+
Cohen, 2005.
|
522 |
+
"""
|
523 |
+
if nx.number_of_selfloops(G) > 0:
|
524 |
+
msg = (
|
525 |
+
"Input graph has self loops which is not permitted; "
|
526 |
+
"Consider using G.remove_edges_from(nx.selfloop_edges(G))."
|
527 |
+
)
|
528 |
+
raise nx.NetworkXNotImplemented(msg)
|
529 |
+
|
530 |
+
H = G.copy()
|
531 |
+
|
532 |
+
n_dropped = 1
|
533 |
+
while n_dropped > 0:
|
534 |
+
n_dropped = 0
|
535 |
+
to_drop = []
|
536 |
+
seen = set()
|
537 |
+
for u in H:
|
538 |
+
nbrs_u = set(H[u])
|
539 |
+
seen.add(u)
|
540 |
+
new_nbrs = [v for v in nbrs_u if v not in seen]
|
541 |
+
for v in new_nbrs:
|
542 |
+
if len(nbrs_u & set(H[v])) < (k - 2):
|
543 |
+
to_drop.append((u, v))
|
544 |
+
H.remove_edges_from(to_drop)
|
545 |
+
n_dropped = len(to_drop)
|
546 |
+
H.remove_nodes_from(list(nx.isolates(H)))
|
547 |
+
|
548 |
+
return H
|
549 |
+
|
550 |
+
|
551 |
+
@nx.utils.not_implemented_for("multigraph")
|
552 |
+
@nx.utils.not_implemented_for("directed")
|
553 |
+
@nx._dispatchable
|
554 |
+
def onion_layers(G):
|
555 |
+
"""Returns the layer of each vertex in an onion decomposition of the graph.
|
556 |
+
|
557 |
+
The onion decomposition refines the k-core decomposition by providing
|
558 |
+
information on the internal organization of each k-shell. It is usually
|
559 |
+
used alongside the `core numbers`.
|
560 |
+
|
561 |
+
Parameters
|
562 |
+
----------
|
563 |
+
G : NetworkX graph
|
564 |
+
An undirected graph without self loops.
|
565 |
+
|
566 |
+
Returns
|
567 |
+
-------
|
568 |
+
od_layers : dictionary
|
569 |
+
A dictionary keyed by node to the onion layer. The layers are
|
570 |
+
contiguous integers starting at 1.
|
571 |
+
|
572 |
+
Raises
|
573 |
+
------
|
574 |
+
NetworkXNotImplemented
|
575 |
+
If `G` is a multigraph or directed graph or if it contains self loops.
|
576 |
+
|
577 |
+
Examples
|
578 |
+
--------
|
579 |
+
>>> degrees = [0, 1, 2, 2, 2, 2, 3]
|
580 |
+
>>> H = nx.havel_hakimi_graph(degrees)
|
581 |
+
>>> H.degree
|
582 |
+
DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
|
583 |
+
>>> nx.onion_layers(H)
|
584 |
+
{6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4}
|
585 |
+
|
586 |
+
See Also
|
587 |
+
--------
|
588 |
+
core_number
|
589 |
+
|
590 |
+
References
|
591 |
+
----------
|
592 |
+
.. [1] Multi-scale structure and topological anomaly detection via a new
|
593 |
+
network statistic: The onion decomposition
|
594 |
+
L. Hébert-Dufresne, J. A. Grochow, and A. Allard
|
595 |
+
Scientific Reports 6, 31708 (2016)
|
596 |
+
http://doi.org/10.1038/srep31708
|
597 |
+
.. [2] Percolation and the effective structure of complex networks
|
598 |
+
A. Allard and L. Hébert-Dufresne
|
599 |
+
Physical Review X 9, 011023 (2019)
|
600 |
+
http://doi.org/10.1103/PhysRevX.9.011023
|
601 |
+
"""
|
602 |
+
if nx.number_of_selfloops(G) > 0:
|
603 |
+
msg = (
|
604 |
+
"Input graph contains self loops which is not permitted; "
|
605 |
+
"Consider using G.remove_edges_from(nx.selfloop_edges(G))."
|
606 |
+
)
|
607 |
+
raise nx.NetworkXNotImplemented(msg)
|
608 |
+
# Dictionaries to register the k-core/onion decompositions.
|
609 |
+
od_layers = {}
|
610 |
+
# Adjacency list
|
611 |
+
neighbors = {v: list(nx.all_neighbors(G, v)) for v in G}
|
612 |
+
# Effective degree of nodes.
|
613 |
+
degrees = dict(G.degree())
|
614 |
+
# Performs the onion decomposition.
|
615 |
+
current_core = 1
|
616 |
+
current_layer = 1
|
617 |
+
# Sets vertices of degree 0 to layer 1, if any.
|
618 |
+
isolated_nodes = list(nx.isolates(G))
|
619 |
+
if len(isolated_nodes) > 0:
|
620 |
+
for v in isolated_nodes:
|
621 |
+
od_layers[v] = current_layer
|
622 |
+
degrees.pop(v)
|
623 |
+
current_layer = 2
|
624 |
+
# Finds the layer for the remaining nodes.
|
625 |
+
while len(degrees) > 0:
|
626 |
+
# Sets the order for looking at nodes.
|
627 |
+
nodes = sorted(degrees, key=degrees.get)
|
628 |
+
# Sets properly the current core.
|
629 |
+
min_degree = degrees[nodes[0]]
|
630 |
+
if min_degree > current_core:
|
631 |
+
current_core = min_degree
|
632 |
+
# Identifies vertices in the current layer.
|
633 |
+
this_layer = []
|
634 |
+
for n in nodes:
|
635 |
+
if degrees[n] > current_core:
|
636 |
+
break
|
637 |
+
this_layer.append(n)
|
638 |
+
# Identifies the core/layer of the vertices in the current layer.
|
639 |
+
for v in this_layer:
|
640 |
+
od_layers[v] = current_layer
|
641 |
+
for n in neighbors[v]:
|
642 |
+
neighbors[n].remove(v)
|
643 |
+
degrees[n] = degrees[n] - 1
|
644 |
+
degrees.pop(v)
|
645 |
+
# Updates the layer count.
|
646 |
+
current_layer = current_layer + 1
|
647 |
+
# Returns the dictionaries containing the onion layer of each vertices.
|
648 |
+
return od_layers
|
venv/lib/python3.10/site-packages/networkx/algorithms/cuts.py
ADDED
@@ -0,0 +1,400 @@
"""Functions for finding and evaluating cuts in a graph.

"""

from itertools import chain

import networkx as nx

__all__ = [
    "boundary_expansion",
    "conductance",
    "cut_size",
    "edge_expansion",
    "mixing_expansion",
    "node_expansion",
    "normalized_cut_size",
    "volume",
]


# TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!


@nx._dispatchable(edge_attrs="weight")
def cut_size(G, S, T=None, weight=None):
    """Returns the size of the cut between two sets of nodes.

    A *cut* is a partition of the nodes of a graph into two sets. The
    *cut size* is the sum of the weights of the edges "between" the two
    sets of nodes.

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        Total weight of all edges from nodes in set `S` to nodes in
        set `T` (and, in the case of directed graphs, all edges from
        nodes in `T` to nodes in `S`).

    Examples
    --------
    In the graph with two cliques joined by a single edge, the natural
    bipartition of the graph into two blocks, one for each clique,
    yields a cut of weight one::

        >>> G = nx.barbell_graph(3, 0)
        >>> S = {0, 1, 2}
        >>> T = {3, 4, 5}
        >>> nx.cut_size(G, S, T)
        1

    Each parallel edge in a multigraph is counted when determining the
    cut size::

        >>> G = nx.MultiGraph(["ab", "ab"])
        >>> S = {"a"}
        >>> T = {"b"}
        >>> nx.cut_size(G, S, T)
        2

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    """
    edges = nx.edge_boundary(G, S, T, data=weight, default=1)
    if G.is_directed():
        edges = chain(edges, nx.edge_boundary(G, T, S, data=weight, default=1))
    return sum(weight for u, v, weight in edges)


@nx._dispatchable(edge_attrs="weight")
def volume(G, S, weight=None):
    """Returns the volume of a set of nodes.

    The *volume* of a set *S* is the sum of the (out-)degrees of nodes
    in *S* (taking into account parallel edges in multigraphs). [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The volume of the set of nodes represented by `S` in the graph
        `G`.

    See Also
    --------
    conductance
    cut_size
    edge_expansion
    edge_boundary
    normalized_cut_size

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    degree = G.out_degree if G.is_directed() else G.degree
    return sum(d for v, d in degree(S, weight=weight))


@nx._dispatchable(edge_attrs="weight")
def normalized_cut_size(G, S, T=None, weight=None):
    """Returns the normalized size of the cut between two sets of nodes.

    The *normalized cut size* is the cut size times the sum of the
    reciprocals of the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The normalized cut size between the two sets `S` and `T`.

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    See Also
    --------
    conductance
    cut_size
    edge_expansion
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    num_cut_edges = cut_size(G, S, T=T, weight=weight)
    volume_S = volume(G, S, weight=weight)
    volume_T = volume(G, T, weight=weight)
    return num_cut_edges * ((1 / volume_S) + (1 / volume_T))


@nx._dispatchable(edge_attrs="weight")
def conductance(G, S, T=None, weight=None):
    """Returns the conductance of two sets of nodes.

    The *conductance* is the quotient of the cut size and the smaller of
    the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The conductance between the two sets `S` and `T`.

    See Also
    --------
    cut_size
    edge_expansion
    normalized_cut_size
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    num_cut_edges = cut_size(G, S, T, weight=weight)
    volume_S = volume(G, S, weight=weight)
    volume_T = volume(G, T, weight=weight)
    return num_cut_edges / min(volume_S, volume_T)


@nx._dispatchable(edge_attrs="weight")
def edge_expansion(G, S, T=None, weight=None):
    """Returns the edge expansion between two node sets.

    The *edge expansion* is the quotient of the cut size and the smaller
    of the cardinalities of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The edge expansion between the two sets `S` and `T`.

    See Also
    --------
    boundary_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Fan Chung.
           *Spectral Graph Theory*.
           (CBMS Regional Conference Series in Mathematics, No. 92),
           American Mathematical Society, 1997, ISBN 0-8218-0315-8
           <http://www.math.ucsd.edu/~fan/research/revised.html>

    """
    if T is None:
        T = set(G) - set(S)
    num_cut_edges = cut_size(G, S, T=T, weight=weight)
    return num_cut_edges / min(len(S), len(T))


@nx._dispatchable(edge_attrs="weight")
def mixing_expansion(G, S, T=None, weight=None):
    """Returns the mixing expansion between two node sets.

    The *mixing expansion* is the quotient of the cut size and twice the
    number of edges in the graph. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The mixing expansion between the two sets `S` and `T`.

    See Also
    --------
    boundary_expansion
    edge_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    num_cut_edges = cut_size(G, S, T=T, weight=weight)
    num_total_edges = G.number_of_edges()
    return num_cut_edges / (2 * num_total_edges)


# TODO What is the generalization to two arguments, S and T? Does the
# denominator become `min(len(S), len(T))`?
@nx._dispatchable
def node_expansion(G, S):
    """Returns the node expansion of the set `S`.

    The *node expansion* is the quotient of the size of the node
    boundary of *S* and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The node expansion of the set `S`.

    See Also
    --------
    boundary_expansion
    edge_expansion
    mixing_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    neighborhood = set(chain.from_iterable(G.neighbors(v) for v in S))
    return len(neighborhood) / len(S)


# TODO What is the generalization to two arguments, S and T? Does the
# denominator become `min(len(S), len(T))`?
@nx._dispatchable
def boundary_expansion(G, S):
    """Returns the boundary expansion of the set `S`.

    The *boundary expansion* is the quotient of the size
    of the node boundary and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The boundary expansion of the set `S`.

    See Also
    --------
    edge_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    return len(nx.node_boundary(G, S)) / len(S)
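# Illustrative sketch (not part of the module above): the quantities defined
# here are related by conductance(S, T) = cut_size(S, T) / min(volume(S),
# volume(T)), checked on the barbell graph used in the cut_size docstring.
import networkx as nx

G = nx.barbell_graph(3, 0)
S, T = {0, 1, 2}, {3, 4, 5}
cut = nx.cut_size(G, S, T)                       # 1
vol_S, vol_T = nx.volume(G, S), nx.volume(G, T)  # 7 and 7
assert nx.conductance(G, S, T) == cut / min(vol_S, vol_T)
assert nx.normalized_cut_size(G, S, T) == cut * ((1 / vol_S) + (1 / vol_T))
assert nx.edge_expansion(G, S, T) == cut / min(len(S), len(T))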
venv/lib/python3.10/site-packages/networkx/algorithms/cycles.py
ADDED
@@ -0,0 +1,1231 @@
1 |
+
"""
|
2 |
+
========================
|
3 |
+
Cycle finding algorithms
|
4 |
+
========================
|
5 |
+
"""
|
6 |
+
|
7 |
+
from collections import Counter, defaultdict
|
8 |
+
from itertools import combinations, product
|
9 |
+
from math import inf
|
10 |
+
|
11 |
+
import networkx as nx
|
12 |
+
from networkx.utils import not_implemented_for, pairwise
|
13 |
+
|
14 |
+
__all__ = [
|
15 |
+
"cycle_basis",
|
16 |
+
"simple_cycles",
|
17 |
+
"recursive_simple_cycles",
|
18 |
+
"find_cycle",
|
19 |
+
"minimum_cycle_basis",
|
20 |
+
"chordless_cycles",
|
21 |
+
"girth",
|
22 |
+
]
|
23 |
+
|
24 |
+
|
25 |
+
@not_implemented_for("directed")
|
26 |
+
@not_implemented_for("multigraph")
|
27 |
+
@nx._dispatchable
|
28 |
+
def cycle_basis(G, root=None):
|
29 |
+
"""Returns a list of cycles which form a basis for cycles of G.
|
30 |
+
|
31 |
+
A basis for cycles of a network is a minimal collection of
|
32 |
+
cycles such that any cycle in the network can be written
|
33 |
+
as a sum of cycles in the basis. Here summation of cycles
|
34 |
+
is defined as "exclusive or" of the edges. Cycle bases are
|
35 |
+
useful, e.g. when deriving equations for electric circuits
|
36 |
+
using Kirchhoff's Laws.
|
37 |
+
|
38 |
+
Parameters
|
39 |
+
----------
|
40 |
+
G : NetworkX Graph
|
41 |
+
root : node, optional
|
42 |
+
Specify starting node for basis.
|
43 |
+
|
44 |
+
Returns
|
45 |
+
-------
|
46 |
+
A list of cycle lists. Each cycle list is a list of nodes
|
47 |
+
which forms a cycle (loop) in G.
|
48 |
+
|
49 |
+
Examples
|
50 |
+
--------
|
51 |
+
>>> G = nx.Graph()
|
52 |
+
>>> nx.add_cycle(G, [0, 1, 2, 3])
|
53 |
+
>>> nx.add_cycle(G, [0, 3, 4, 5])
|
54 |
+
>>> nx.cycle_basis(G, 0)
|
55 |
+
[[3, 4, 5, 0], [1, 2, 3, 0]]
|
56 |
+
|
57 |
+
Notes
|
58 |
+
-----
|
59 |
+
This is adapted from algorithm CACM 491 [1]_.
|
60 |
+
|
61 |
+
References
|
62 |
+
----------
|
63 |
+
.. [1] Paton, K. An algorithm for finding a fundamental set of
|
64 |
+
cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518.
|
65 |
+
|
66 |
+
See Also
|
67 |
+
--------
|
68 |
+
simple_cycles
|
69 |
+
minimum_cycle_basis
|
70 |
+
"""
|
71 |
+
gnodes = dict.fromkeys(G) # set-like object that maintains node order
|
72 |
+
cycles = []
|
73 |
+
while gnodes: # loop over connected components
|
74 |
+
if root is None:
|
75 |
+
root = gnodes.popitem()[0]
|
76 |
+
stack = [root]
|
77 |
+
pred = {root: root}
|
78 |
+
used = {root: set()}
|
79 |
+
while stack: # walk the spanning tree finding cycles
|
80 |
+
z = stack.pop() # use last-in so cycles easier to find
|
81 |
+
zused = used[z]
|
82 |
+
for nbr in G[z]:
|
83 |
+
if nbr not in used: # new node
|
84 |
+
pred[nbr] = z
|
85 |
+
stack.append(nbr)
|
86 |
+
used[nbr] = {z}
|
87 |
+
elif nbr == z: # self loops
|
88 |
+
cycles.append([z])
|
89 |
+
elif nbr not in zused: # found a cycle
|
90 |
+
pn = used[nbr]
|
91 |
+
cycle = [nbr, z]
|
92 |
+
p = pred[z]
|
93 |
+
while p not in pn:
|
94 |
+
cycle.append(p)
|
95 |
+
p = pred[p]
|
96 |
+
cycle.append(p)
|
97 |
+
cycles.append(cycle)
|
98 |
+
used[nbr].add(z)
|
99 |
+
for node in pred:
|
100 |
+
gnodes.pop(node, None)
|
101 |
+
root = None
|
102 |
+
return cycles
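# Illustrative sketch (not part of the module above): every cycle of G is an
# edge-wise symmetric difference ("exclusive or") of basis cycles. For two
# squares sharing the edge (0, 3), XOR-ing the two basis cycles recovers the
# outer 6-cycle.
import networkx as nx
from networkx.utils import pairwise

G = nx.Graph()
nx.add_cycle(G, [0, 1, 2, 3])
nx.add_cycle(G, [0, 3, 4, 5])
basis = nx.cycle_basis(G, 0)
edge_sets = [{frozenset(e) for e in pairwise(c, cyclic=True)} for c in basis]
outer = edge_sets[0] ^ edge_sets[1]
# `outer` is the edge set of the hexagon 0-1-2-3-4-5; the shared edge cancels.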
|
103 |
+
|
104 |
+
|
105 |
+
@nx._dispatchable
|
106 |
+
def simple_cycles(G, length_bound=None):
|
107 |
+
"""Find simple cycles (elementary circuits) of a graph.
|
108 |
+
|
109 |
+
A `simple cycle`, or `elementary circuit`, is a closed path where
|
110 |
+
no node appears twice. In a directed graph, two simple cycles are distinct
|
111 |
+
if they are not cyclic permutations of each other. In an undirected graph,
|
112 |
+
two simple cycles are distinct if they are not cyclic permutations of each
|
113 |
+
other nor of the other's reversal.
|
114 |
+
|
115 |
+
Optionally, the cycles are bounded in length. In the unbounded case, we use
|
116 |
+
a nonrecursive, iterator/generator version of Johnson's algorithm [1]_. In
|
117 |
+
the bounded case, we use a version of the algorithm of Gupta and
|
118 |
+
Suzumura[2]_. There may be better algorithms for some cases [3]_ [4]_ [5]_.
|
119 |
+
|
120 |
+
The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some
|
121 |
+
well-known preprocessing techniques. When G is directed, we restrict our
|
122 |
+
attention to strongly connected components of G, generate all simple cycles
|
123 |
+
containing a certain node, remove that node, and further decompose the
|
124 |
+
remainder into strongly connected components. When G is undirected, we
|
125 |
+
restrict our attention to biconnected components, generate all simple cycles
|
126 |
+
containing a particular edge, remove that edge, and further decompose the
|
127 |
+
remainder into biconnected components.
|
128 |
+
|
129 |
+
Note that multigraphs are supported by this function -- and in undirected
|
130 |
+
multigraphs, a pair of parallel edges is considered a cycle of length 2.
|
131 |
+
Likewise, self-loops are considered to be cycles of length 1. We define
|
132 |
+
cycles as sequences of nodes; so the presence of loops and parallel edges
|
133 |
+
does not change the number of simple cycles in a graph.
|
134 |
+
|
135 |
+
Parameters
|
136 |
+
----------
|
137 |
+
G : NetworkX DiGraph
|
138 |
+
A directed graph
|
139 |
+
|
140 |
+
length_bound : int or None, optional (default=None)
|
141 |
+
If length_bound is an int, generate all simple cycles of G with length at
|
142 |
+
most length_bound. Otherwise, generate all simple cycles of G.
|
143 |
+
|
144 |
+
Yields
|
145 |
+
------
|
146 |
+
list of nodes
|
147 |
+
Each cycle is represented by a list of nodes along the cycle.
|
148 |
+
|
149 |
+
Examples
|
150 |
+
--------
|
151 |
+
>>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
|
152 |
+
>>> G = nx.DiGraph(edges)
|
153 |
+
>>> sorted(nx.simple_cycles(G))
|
154 |
+
[[0], [0, 1, 2], [0, 2], [1, 2], [2]]
|
155 |
+
|
156 |
+
To filter the cycles so that they don't include certain nodes or edges,
|
157 |
+
copy your graph and eliminate those nodes or edges before calling.
|
158 |
+
For example, to exclude self-loops from the above example:
|
159 |
+
|
160 |
+
>>> H = G.copy()
|
161 |
+
>>> H.remove_edges_from(nx.selfloop_edges(G))
|
162 |
+
>>> sorted(nx.simple_cycles(H))
|
163 |
+
[[0, 1, 2], [0, 2], [1, 2]]
|
164 |
+
|
165 |
+
Notes
|
166 |
+
-----
|
167 |
+
When length_bound is None, the time complexity is $O((n+e)(c+1))$ for $n$
|
168 |
+
nodes, $e$ edges and $c$ simple circuits. Otherwise, when length_bound > 1,
|
169 |
+
the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
|
170 |
+
the nodes of G and $k$ = length_bound.
|
171 |
+
|
172 |
+
Raises
|
173 |
+
------
|
174 |
+
ValueError
|
175 |
+
when length_bound < 0.
|
176 |
+
|
177 |
+
References
|
178 |
+
----------
|
179 |
+
.. [1] Finding all the elementary circuits of a directed graph.
|
180 |
+
D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
|
181 |
+
https://doi.org/10.1137/0204007
|
182 |
+
.. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
|
183 |
+
A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
|
184 |
+
.. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
|
185 |
+
G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
|
186 |
+
.. [4] A search strategy for the elementary cycles of a directed graph.
|
187 |
+
J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
|
188 |
+
v. 16, no. 2, 192-204, 1976.
|
189 |
+
.. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
|
190 |
+
R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
|
191 |
+
G. Sacomoto https://arxiv.org/abs/1205.2766
|
192 |
+
|
193 |
+
See Also
|
194 |
+
--------
|
195 |
+
cycle_basis
|
196 |
+
chordless_cycles
|
197 |
+
"""
|
198 |
+
|
199 |
+
if length_bound is not None:
|
200 |
+
if length_bound == 0:
|
201 |
+
return
|
202 |
+
elif length_bound < 0:
|
203 |
+
raise ValueError("length bound must be non-negative")
|
204 |
+
|
205 |
+
directed = G.is_directed()
|
206 |
+
yield from ([v] for v, Gv in G.adj.items() if v in Gv)
|
207 |
+
|
208 |
+
if length_bound is not None and length_bound == 1:
|
209 |
+
return
|
210 |
+
|
211 |
+
if G.is_multigraph() and not directed:
|
212 |
+
visited = set()
|
213 |
+
for u, Gu in G.adj.items():
|
214 |
+
multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
|
215 |
+
yield from ([u, v] for v, m in multiplicity if m > 1)
|
216 |
+
visited.add(u)
|
217 |
+
|
218 |
+
# explicitly filter out loops; implicitly filter out parallel edges
|
219 |
+
if directed:
|
220 |
+
G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
|
221 |
+
else:
|
222 |
+
G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
|
223 |
+
|
224 |
+
# this case is not strictly necessary but improves performance
|
225 |
+
if length_bound is not None and length_bound == 2:
|
226 |
+
if directed:
|
227 |
+
visited = set()
|
228 |
+
for u, Gu in G.adj.items():
|
229 |
+
yield from (
|
230 |
+
[v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
|
231 |
+
)
|
232 |
+
visited.add(u)
|
233 |
+
return
|
234 |
+
|
235 |
+
if directed:
|
236 |
+
yield from _directed_cycle_search(G, length_bound)
|
237 |
+
else:
|
238 |
+
yield from _undirected_cycle_search(G, length_bound)
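# Illustrative sketch (not part of the module above): bounding the cycle
# length restricts the search to short cycles. In the simple graph K5 the
# only simple cycles of length at most 3 are its 10 triangles.
import networkx as nx

K5 = nx.complete_graph(5)
triangles = list(nx.simple_cycles(K5, length_bound=3))
assert len(triangles) == 10
assert all(len(c) == 3 for c in triangles)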
|
239 |
+
|
240 |
+
|
241 |
+
def _directed_cycle_search(G, length_bound):
|
242 |
+
"""A dispatch function for `simple_cycles` for directed graphs.
|
243 |
+
|
244 |
+
We generate all cycles of G through binary partition.
|
245 |
+
|
246 |
+
1. Pick a node v in G which belongs to at least one cycle
|
247 |
+
a. Generate all cycles of G which contain the node v.
|
248 |
+
b. Recursively generate all cycles of G \\ v.
|
249 |
+
|
250 |
+
This is accomplished through the following:
|
251 |
+
|
252 |
+
1. Compute the strongly connected components SCC of G.
|
253 |
+
2. Select and remove a biconnected component C from BCC. Select a
|
254 |
+
non-tree edge (u, v) of a depth-first search of G[C].
|
255 |
+
3. For each simple cycle P containing v in G[C], yield P.
|
256 |
+
4. Add the biconnected components of G[C \\ v] to BCC.
|
257 |
+
|
258 |
+
If the parameter length_bound is not None, then step 3 will be limited to
|
259 |
+
simple cycles of length at most length_bound.
|
260 |
+
|
261 |
+
Parameters
|
262 |
+
----------
|
263 |
+
G : NetworkX DiGraph
|
264 |
+
A directed graph
|
265 |
+
|
266 |
+
length_bound : int or None
|
267 |
+
If length_bound is an int, generate all simple cycles of G with length at most length_bound.
|
268 |
+
Otherwise, generate all simple cycles of G.
|
269 |
+
|
270 |
+
Yields
|
271 |
+
------
|
272 |
+
list of nodes
|
273 |
+
Each cycle is represented by a list of nodes along the cycle.
|
274 |
+
"""
|
275 |
+
|
276 |
+
scc = nx.strongly_connected_components
|
277 |
+
components = [c for c in scc(G) if len(c) >= 2]
|
278 |
+
while components:
|
279 |
+
c = components.pop()
|
280 |
+
Gc = G.subgraph(c)
|
281 |
+
v = next(iter(c))
|
282 |
+
if length_bound is None:
|
283 |
+
yield from _johnson_cycle_search(Gc, [v])
|
284 |
+
else:
|
285 |
+
yield from _bounded_cycle_search(Gc, [v], length_bound)
|
286 |
+
# delete v after searching G, to make sure we can find v
|
287 |
+
G.remove_node(v)
|
288 |
+
components.extend(c for c in scc(Gc) if len(c) >= 2)
|
289 |
+
|
290 |
+
|
291 |
+
def _undirected_cycle_search(G, length_bound):
|
292 |
+
"""A dispatch function for `simple_cycles` for undirected graphs.
|
293 |
+
|
294 |
+
We generate all cycles of G through binary partition.
|
295 |
+
|
296 |
+
1. Pick an edge (u, v) in G which belongs to at least one cycle
|
297 |
+
a. Generate all cycles of G which contain the edge (u, v)
|
298 |
+
b. Recursively generate all cycles of G \\ (u, v)
|
299 |
+
|
300 |
+
This is accomplished through the following:
|
301 |
+
|
302 |
+
1. Compute the biconnected components BCC of G.
|
303 |
+
2. Select and remove a biconnected component C from BCC. Select a
|
304 |
+
non-tree edge (u, v) of a depth-first search of G[C].
|
305 |
+
3. For each (v -> u) path P remaining in G[C] \\ (u, v), yield P.
|
306 |
+
4. Add the biconnected components of G[C] \\ (u, v) to BCC.
|
307 |
+
|
308 |
+
If the parameter length_bound is not None, then step 3 will be limited to simple paths
|
309 |
+
of length at most length_bound.
|
310 |
+
|
311 |
+
Parameters
|
312 |
+
----------
|
313 |
+
G : NetworkX Graph
|
314 |
+
An undirected graph
|
315 |
+
|
316 |
+
length_bound : int or None
|
317 |
+
If length_bound is an int, generate all simple cycles of G with length at most length_bound.
|
318 |
+
Otherwise, generate all simple cycles of G.
|
319 |
+
|
320 |
+
Yields
|
321 |
+
------
|
322 |
+
list of nodes
|
323 |
+
Each cycle is represented by a list of nodes along the cycle.
|
324 |
+
"""
|
325 |
+
|
326 |
+
bcc = nx.biconnected_components
|
327 |
+
components = [c for c in bcc(G) if len(c) >= 3]
|
328 |
+
while components:
|
329 |
+
c = components.pop()
|
330 |
+
Gc = G.subgraph(c)
|
331 |
+
uv = list(next(iter(Gc.edges)))
|
332 |
+
G.remove_edge(*uv)
|
333 |
+
# delete (u, v) before searching G, to avoid fake 3-cycles [u, v, u]
|
334 |
+
if length_bound is None:
|
335 |
+
yield from _johnson_cycle_search(Gc, uv)
|
336 |
+
else:
|
337 |
+
yield from _bounded_cycle_search(Gc, uv, length_bound)
|
338 |
+
components.extend(c for c in bcc(Gc) if len(c) >= 3)
|
339 |
+
|
340 |
+
|
341 |
+
class _NeighborhoodCache(dict):
|
342 |
+
"""Very lightweight graph wrapper which caches neighborhoods as list.
|
343 |
+
|
344 |
+
This dict subclass uses the __missing__ functionality to query graphs for
|
345 |
+
their neighborhoods, and store the result as a list. This is used to avoid
|
346 |
+
the performance penalty incurred by subgraph views.
|
347 |
+
"""
|
348 |
+
|
349 |
+
def __init__(self, G):
|
350 |
+
self.G = G
|
351 |
+
|
352 |
+
def __missing__(self, v):
|
353 |
+
Gv = self[v] = list(self.G[v])
|
354 |
+
return Gv
|
355 |
+
|
356 |
+
|
357 |
+
def _johnson_cycle_search(G, path):
|
358 |
+
"""The main loop of the cycle-enumeration algorithm of Johnson.
|
359 |
+
|
360 |
+
Parameters
|
361 |
+
----------
|
362 |
+
G : NetworkX Graph or DiGraph
|
363 |
+
A graph
|
364 |
+
|
365 |
+
path : list
|
366 |
+
A cycle prefix. All cycles generated will begin with this prefix.
|
367 |
+
|
368 |
+
Yields
|
369 |
+
------
|
370 |
+
list of nodes
|
371 |
+
Each cycle is represented by a list of nodes along the cycle.
|
372 |
+
|
373 |
+
References
|
374 |
+
----------
|
375 |
+
.. [1] Finding all the elementary circuits of a directed graph.
|
376 |
+
D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
|
377 |
+
https://doi.org/10.1137/0204007
|
378 |
+
|
379 |
+
"""
|
380 |
+
|
381 |
+
G = _NeighborhoodCache(G)
|
382 |
+
blocked = set(path)
|
383 |
+
B = defaultdict(set) # graph portions that yield no elementary circuit
|
384 |
+
start = path[0]
|
385 |
+
stack = [iter(G[path[-1]])]
|
386 |
+
closed = [False]
|
387 |
+
while stack:
|
388 |
+
nbrs = stack[-1]
|
389 |
+
for w in nbrs:
|
390 |
+
if w == start:
|
391 |
+
yield path[:]
|
392 |
+
closed[-1] = True
|
393 |
+
elif w not in blocked:
|
394 |
+
path.append(w)
|
395 |
+
closed.append(False)
|
396 |
+
stack.append(iter(G[w]))
|
397 |
+
blocked.add(w)
|
398 |
+
break
|
399 |
+
else: # no more nbrs
|
400 |
+
stack.pop()
|
401 |
+
v = path.pop()
|
402 |
+
if closed.pop():
|
403 |
+
if closed:
|
404 |
+
closed[-1] = True
|
405 |
+
unblock_stack = {v}
|
406 |
+
while unblock_stack:
|
407 |
+
u = unblock_stack.pop()
|
408 |
+
if u in blocked:
|
409 |
+
blocked.remove(u)
|
410 |
+
unblock_stack.update(B[u])
|
411 |
+
B[u].clear()
|
412 |
+
else:
|
413 |
+
for w in G[v]:
|
414 |
+
B[w].add(v)
|
415 |
+
|
416 |
+
|
417 |
+
def _bounded_cycle_search(G, path, length_bound):
|
418 |
+
"""The main loop of the cycle-enumeration algorithm of Gupta and Suzumura.
|
419 |
+
|
420 |
+
Parameters
|
421 |
+
----------
|
422 |
+
G : NetworkX Graph or DiGraph
|
423 |
+
A graph
|
424 |
+
|
425 |
+
path : list
|
426 |
+
A cycle prefix. All cycles generated will begin with this prefix.
|
427 |
+
|
428 |
+
length_bound: int
|
429 |
+
A length bound. All cycles generated will have length at most length_bound.
|
430 |
+
|
431 |
+
Yields
|
432 |
+
------
|
433 |
+
list of nodes
|
434 |
+
Each cycle is represented by a list of nodes along the cycle.
|
435 |
+
|
436 |
+
References
|
437 |
+
----------
|
438 |
+
.. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph
|
439 |
+
A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
|
440 |
+
|
441 |
+
"""
|
442 |
+
G = _NeighborhoodCache(G)
|
443 |
+
lock = {v: 0 for v in path}
|
444 |
+
B = defaultdict(set)
|
445 |
+
start = path[0]
|
446 |
+
stack = [iter(G[path[-1]])]
|
447 |
+
blen = [length_bound]
|
448 |
+
while stack:
|
449 |
+
nbrs = stack[-1]
|
450 |
+
for w in nbrs:
|
451 |
+
if w == start:
|
452 |
+
yield path[:]
|
453 |
+
blen[-1] = 1
|
454 |
+
elif len(path) < lock.get(w, length_bound):
|
455 |
+
path.append(w)
|
456 |
+
blen.append(length_bound)
|
457 |
+
lock[w] = len(path)
|
458 |
+
stack.append(iter(G[w]))
|
459 |
+
break
|
460 |
+
else:
|
461 |
+
stack.pop()
|
462 |
+
v = path.pop()
|
463 |
+
bl = blen.pop()
|
464 |
+
if blen:
|
465 |
+
blen[-1] = min(blen[-1], bl)
|
466 |
+
if bl < length_bound:
|
467 |
+
relax_stack = [(bl, v)]
|
468 |
+
while relax_stack:
|
469 |
+
bl, u = relax_stack.pop()
|
470 |
+
if lock.get(u, length_bound) < length_bound - bl + 1:
|
471 |
+
lock[u] = length_bound - bl + 1
|
472 |
+
relax_stack.extend((bl + 1, w) for w in B[u].difference(path))
|
473 |
+
else:
|
474 |
+
for w in G[v]:
|
475 |
+
B[w].add(v)
|
476 |
+
|
477 |
+
|
478 |
+
@nx._dispatchable
|
479 |
+
def chordless_cycles(G, length_bound=None):
|
480 |
+
"""Find simple chordless cycles of a graph.
|
481 |
+
|
482 |
+
A `simple cycle` is a closed path where no node appears twice. In a simple
|
483 |
+
cycle, a `chord` is an additional edge between two nodes in the cycle. A
|
484 |
+
`chordless cycle` is a simple cycle without chords. Said differently, a
|
485 |
+
chordless cycle is a cycle C in a graph G where the number of edges in the
|
486 |
+
induced graph G[C] is equal to the length of `C`.
|
487 |
+
|
488 |
+
Note that some care must be taken in the case that G is not a simple graph
|
489 |
+
nor a simple digraph. Some authors limit the definition of chordless cycles
|
490 |
+
to have a prescribed minimum length; we do not.
|
491 |
+
|
492 |
+
1. We interpret self-loops to be chordless cycles, except in multigraphs
|
493 |
+
with multiple loops in parallel. Likewise, in a chordless cycle of
|
494 |
+
length greater than 1, there can be no nodes with self-loops.
|
495 |
+
|
496 |
+
2. We interpret directed two-cycles to be chordless cycles, except in
|
497 |
+
multi-digraphs when any edge in a two-cycle has a parallel copy.
|
498 |
+
|
499 |
+
3. We interpret parallel pairs of undirected edges as two-cycles, except
|
500 |
+
when a third (or more) parallel edge exists between the two nodes.
|
501 |
+
|
502 |
+
4. Generalizing the above, edges with parallel clones may not occur in
|
503 |
+
chordless cycles.
|
504 |
+
|
505 |
+
In a directed graph, two chordless cycles are distinct if they are not
|
506 |
+
cyclic permutations of each other. In an undirected graph, two chordless
|
507 |
+
cycles are distinct if they are not cyclic permutations of each other nor of
|
508 |
+
the other's reversal.
|
509 |
+
|
510 |
+
Optionally, the cycles are bounded in length.
|
511 |
+
|
512 |
+
We use an algorithm strongly inspired by that of Dias et al [1]_. It has
|
513 |
+
been modified in the following ways:
|
514 |
+
|
515 |
+
1. Recursion is avoided, per Python's limitations
|
516 |
+
|
517 |
+
2. The labeling function is not necessary, because the starting paths
|
518 |
+
are chosen (and deleted from the host graph) to prevent multiple
|
519 |
+
occurrences of the same path
|
520 |
+
|
521 |
+
3. The search is optionally bounded at a specified length
|
522 |
+
|
523 |
+
4. Support for directed graphs is provided by extending cycles along
|
524 |
+
forward edges, and blocking nodes along forward and reverse edges
|
525 |
+
|
526 |
+
5. Support for multigraphs is provided by omitting digons from the set
|
527 |
+
of forward edges
|
528 |
+
|
529 |
+
Parameters
|
530 |
+
----------
|
531 |
+
G : NetworkX DiGraph
|
532 |
+
A directed graph
|
533 |
+
|
534 |
+
length_bound : int or None, optional (default=None)
|
535 |
+
If length_bound is an int, generate all simple cycles of G with length at
|
536 |
+
most length_bound. Otherwise, generate all simple cycles of G.
|
537 |
+
|
538 |
+
Yields
|
539 |
+
------
|
540 |
+
list of nodes
|
541 |
+
Each cycle is represented by a list of nodes along the cycle.
|
542 |
+
|
543 |
+
Examples
|
544 |
+
--------
|
545 |
+
>>> sorted(list(nx.chordless_cycles(nx.complete_graph(4))))
|
546 |
+
[[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]]
|
547 |
+
|
548 |
+
Notes
|
549 |
+
-----
|
550 |
+
When length_bound is None, and the graph is simple, the time complexity is
|
551 |
+
$O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles.
|
552 |
+
|
553 |
+
Raises
|
554 |
+
------
|
555 |
+
ValueError
|
556 |
+
when length_bound < 0.
|
557 |
+
|
558 |
+
References
|
559 |
+
----------
|
560 |
+
.. [1] Efficient enumeration of chordless cycles
|
561 |
+
E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
|
562 |
+
https://arxiv.org/abs/1309.1051
|
563 |
+
|
564 |
+
See Also
|
565 |
+
--------
|
566 |
+
simple_cycles
|
567 |
+
"""
|
568 |
+
|
569 |
+
if length_bound is not None:
|
570 |
+
if length_bound == 0:
|
571 |
+
return
|
572 |
+
elif length_bound < 0:
|
573 |
+
raise ValueError("length bound must be non-negative")
|
574 |
+
|
575 |
+
directed = G.is_directed()
|
576 |
+
multigraph = G.is_multigraph()
|
577 |
+
|
578 |
+
if multigraph:
|
579 |
+
yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1)
|
580 |
+
else:
|
581 |
+
yield from ([v] for v, Gv in G.adj.items() if v in Gv)
|
582 |
+
|
583 |
+
if length_bound is not None and length_bound == 1:
|
584 |
+
return
|
585 |
+
|
586 |
+
# Nodes with loops cannot belong to longer cycles. Let's delete them here.
|
587 |
+
# also, we implicitly reduce the multiplicity of edges down to 1 in the case
|
588 |
+
# of multiedges.
|
589 |
+
if directed:
|
590 |
+
F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
|
591 |
+
B = F.to_undirected(as_view=False)
|
592 |
+
else:
|
593 |
+
F = nx.Graph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
|
594 |
+
B = None
|
595 |
+
|
596 |
+
# If we're given a multigraph, we have a few cases to consider with parallel
|
597 |
+
# edges.
|
598 |
+
#
|
599 |
+
# 1. If we have 2 or more edges in parallel between the nodes (u, v), we
|
600 |
+
# must not construct longer cycles along (u, v).
|
601 |
+
# 2. If G is not directed, then a pair of parallel edges between (u, v) is a
|
602 |
+
# chordless cycle unless there exists a third (or more) parallel edge.
|
603 |
+
# 3. If G is directed, then parallel edges do not form cycles, but do
|
604 |
+
# preclude back-edges from forming cycles (handled in the next section),
|
605 |
+
# Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also
|
606 |
+
# present, then we remove both from F.
|
607 |
+
#
|
608 |
+
# In directed graphs, we need to consider both directions that edges can
|
609 |
+
# take, so iterate over all edges (u, v) and possibly (v, u). In undirected
|
610 |
+
# graphs, we need to be a little careful to only consider every edge once,
|
611 |
+
# so we use a "visited" set to emulate node-order comparisons.
|
612 |
+
|
613 |
+
if multigraph:
|
614 |
+
if not directed:
|
615 |
+
B = F.copy()
|
616 |
+
visited = set()
|
617 |
+
for u, Gu in G.adj.items():
|
618 |
+
if directed:
|
619 |
+
multiplicity = ((v, len(Guv)) for v, Guv in Gu.items())
|
620 |
+
for v, m in multiplicity:
|
621 |
+
if m > 1:
|
622 |
+
F.remove_edges_from(((u, v), (v, u)))
|
623 |
+
else:
|
624 |
+
multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
|
625 |
+
for v, m in multiplicity:
|
626 |
+
if m == 2:
|
627 |
+
yield [u, v]
|
628 |
+
if m > 1:
|
629 |
+
F.remove_edge(u, v)
|
630 |
+
visited.add(u)
|
631 |
+
|
632 |
+
# If we're given a directed graphs, we need to think about digons. If we
|
633 |
+
# have two edges (u, v) and (v, u), then that's a two-cycle. If either edge
|
634 |
+
# was duplicated above, then we removed both from F. So, any digons we find
|
635 |
+
# here are chordless. After finding digons, we remove their edges from F
|
636 |
+
# to avoid traversing them in the search for chordless cycles.
|
637 |
+
if directed:
|
638 |
+
for u, Fu in F.adj.items():
|
639 |
+
digons = [[u, v] for v in Fu if F.has_edge(v, u)]
|
640 |
+
yield from digons
|
641 |
+
F.remove_edges_from(digons)
|
642 |
+
F.remove_edges_from(e[::-1] for e in digons)
|
643 |
+
|
644 |
+
if length_bound is not None and length_bound == 2:
|
645 |
+
return
|
646 |
+
|
647 |
+
# Now, we prepare to search for cycles. We have removed all cycles of
|
648 |
+
# lengths 1 and 2, so F is a simple graph or simple digraph. We repeatedly
|
649 |
+
# separate digraphs into their strongly connected components, and undirected
|
650 |
+
# graphs into their biconnected components. For each component, we pick a
|
651 |
+
# node v, search for chordless cycles based at each "stem" (u, v, w), and
|
652 |
+
# then remove v from that component before separating the graph again.
|
653 |
+
if directed:
|
654 |
+
separate = nx.strongly_connected_components
|
655 |
+
|
656 |
+
# Directed stems look like (u -> v -> w), so we use the product of
|
657 |
+
# predecessors of v with successors of v.
|
658 |
+
def stems(C, v):
|
659 |
+
for u, w in product(C.pred[v], C.succ[v]):
|
660 |
+
if not G.has_edge(u, w): # omit stems with acyclic chords
|
661 |
+
yield [u, v, w], F.has_edge(w, u)
|
662 |
+
|
663 |
+
else:
|
664 |
+
separate = nx.biconnected_components
|
665 |
+
|
666 |
+
# Undirected stems look like (u ~ v ~ w), but we must not also search
|
667 |
+
# (w ~ v ~ u), so we use combinations of v's neighbors of length 2.
|
668 |
+
def stems(C, v):
|
669 |
+
yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2))
|
670 |
+
|
671 |
+
components = [c for c in separate(F) if len(c) > 2]
|
672 |
+
while components:
|
673 |
+
c = components.pop()
|
674 |
+
v = next(iter(c))
|
675 |
+
Fc = F.subgraph(c)
|
676 |
+
Fcc = Bcc = None
|
677 |
+
for S, is_triangle in stems(Fc, v):
|
678 |
+
if is_triangle:
|
679 |
+
yield S
|
680 |
+
else:
|
681 |
+
if Fcc is None:
|
682 |
+
Fcc = _NeighborhoodCache(Fc)
|
683 |
+
Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c))
|
684 |
+
yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound)
|
685 |
+
|
686 |
+
components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2)
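# Illustrative sketch (not part of the module above): adding a chord to a
# 4-cycle destroys the chordless 4-cycle and leaves only the two triangles
# created by the chord.
import networkx as nx

G = nx.cycle_graph(4)
G.add_edge(0, 2)  # the chord
chordless = sorted(sorted(c) for c in nx.chordless_cycles(G))
# chordless == [[0, 1, 2], [0, 2, 3]]; the square 0-1-2-3 is not chordless.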
|
687 |
+
|
688 |
+
|
689 |
+
def _chordless_cycle_search(F, B, path, length_bound):
|
690 |
+
"""The main loop for chordless cycle enumeration.
|
691 |
+
|
692 |
+
This algorithm is strongly inspired by that of Dias et al [1]_. It has been
|
693 |
+
modified in the following ways:
|
694 |
+
|
695 |
+
1. Recursion is avoided, per Python's limitations
|
696 |
+
|
697 |
+
2. The labeling function is not necessary, because the starting paths
|
698 |
+
are chosen (and deleted from the host graph) to prevent multiple
|
699 |
+
occurrences of the same path
|
700 |
+
|
701 |
+
3. The search is optionally bounded at a specified length
|
702 |
+
|
703 |
+
4. Support for directed graphs is provided by extending cycles along
|
704 |
+
forward edges, and blocking nodes along forward and reverse edges
|
705 |
+
|
706 |
+
5. Support for multigraphs is provided by omitting digons from the set
|
707 |
+
of forward edges
|
708 |
+
|
709 |
+
Parameters
|
710 |
+
----------
|
711 |
+
F : _NeighborhoodCache
|
712 |
+
A graph of forward edges to follow in constructing cycles
|
713 |
+
|
714 |
+
B : _NeighborhoodCache
|
715 |
+
A graph of blocking edges to prevent the production of chordless cycles
|
716 |
+
|
717 |
+
path : list
|
718 |
+
A cycle prefix. All cycles generated will begin with this prefix.
|
719 |
+
|
720 |
+
length_bound : int
|
721 |
+
A length bound. All cycles generated will have length at most length_bound.
|
722 |
+
|
723 |
+
|
724 |
+
Yields
|
725 |
+
------
|
726 |
+
list of nodes
|
727 |
+
Each cycle is represented by a list of nodes along the cycle.
|
728 |
+
|
729 |
+
References
|
730 |
+
----------
|
731 |
+
.. [1] Efficient enumeration of chordless cycles
|
732 |
+
E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
|
733 |
+
https://arxiv.org/abs/1309.1051
|
734 |
+
|
735 |
+
"""
|
736 |
+
blocked = defaultdict(int)
|
737 |
+
target = path[0]
|
738 |
+
blocked[path[1]] = 1
|
739 |
+
for w in path[1:]:
|
740 |
+
for v in B[w]:
|
741 |
+
blocked[v] += 1
|
742 |
+
|
743 |
+
stack = [iter(F[path[2]])]
|
744 |
+
while stack:
|
745 |
+
nbrs = stack[-1]
|
746 |
+
for w in nbrs:
|
747 |
+
if blocked[w] == 1 and (length_bound is None or len(path) < length_bound):
|
748 |
+
Fw = F[w]
|
749 |
+
if target in Fw:
|
750 |
+
yield path + [w]
|
751 |
+
else:
|
752 |
+
Bw = B[w]
|
753 |
+
if target in Bw:
|
754 |
+
continue
|
755 |
+
for v in Bw:
|
756 |
+
blocked[v] += 1
|
757 |
+
path.append(w)
|
758 |
+
stack.append(iter(Fw))
|
759 |
+
break
|
760 |
+
else:
|
761 |
+
stack.pop()
|
762 |
+
for v in B[path.pop()]:
|
763 |
+
blocked[v] -= 1
|
764 |
+
|
765 |
+
|
766 |
+
@not_implemented_for("undirected")
|
767 |
+
@nx._dispatchable(mutates_input=True)
|
768 |
+
def recursive_simple_cycles(G):
|
769 |
+
"""Find simple cycles (elementary circuits) of a directed graph.
|
770 |
+
|
771 |
+
A `simple cycle`, or `elementary circuit`, is a closed path where
|
772 |
+
no node appears twice. Two elementary circuits are distinct if they
|
773 |
+
are not cyclic permutations of each other.
|
774 |
+
|
775 |
+
This version uses a recursive algorithm to build a list of cycles.
|
776 |
+
You should probably use the iterator version called simple_cycles().
|
777 |
+
Warning: This recursive version uses lots of RAM!
|
778 |
+
It appears in NetworkX for pedagogical value.
|
779 |
+
|
780 |
+
Parameters
|
781 |
+
----------
|
782 |
+
G : NetworkX DiGraph
|
783 |
+
A directed graph
|
784 |
+
|
785 |
+
Returns
|
786 |
+
-------
|
787 |
+
A list of cycles, where each cycle is represented by a list of nodes
|
788 |
+
along the cycle.
|
789 |
+
|
790 |
+
Example:
|
791 |
+
|
792 |
+
>>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
|
793 |
+
>>> G = nx.DiGraph(edges)
|
794 |
+
>>> nx.recursive_simple_cycles(G)
|
795 |
+
[[0], [2], [0, 1, 2], [0, 2], [1, 2]]
|
796 |
+
|
797 |
+
Notes
|
798 |
+
-----
|
799 |
+
The implementation follows pp. 79-80 in [1]_.
|
800 |
+
|
801 |
+
The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
|
802 |
+
elementary circuits.
|
803 |
+
|
804 |
+
References
|
805 |
+
----------
|
806 |
+
.. [1] Finding all the elementary circuits of a directed graph.
|
807 |
+
D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
|
808 |
+
https://doi.org/10.1137/0204007
|
809 |
+
|
810 |
+
See Also
|
811 |
+
--------
|
812 |
+
simple_cycles, cycle_basis
|
813 |
+
"""
|
814 |
+
|
815 |
+
# Jon Olav Vik, 2010-08-09
|
816 |
+
def _unblock(thisnode):
|
817 |
+
"""Recursively unblock and remove nodes from B[thisnode]."""
|
818 |
+
if blocked[thisnode]:
|
819 |
+
blocked[thisnode] = False
|
820 |
+
while B[thisnode]:
|
821 |
+
_unblock(B[thisnode].pop())
|
822 |
+
|
823 |
+
def circuit(thisnode, startnode, component):
|
824 |
+
closed = False # set to True if elementary path is closed
|
825 |
+
path.append(thisnode)
|
826 |
+
blocked[thisnode] = True
|
827 |
+
for nextnode in component[thisnode]: # direct successors of thisnode
|
828 |
+
if nextnode == startnode:
|
829 |
+
result.append(path[:])
|
830 |
+
closed = True
|
831 |
+
elif not blocked[nextnode]:
|
832 |
+
if circuit(nextnode, startnode, component):
|
833 |
+
closed = True
|
834 |
+
if closed:
|
835 |
+
_unblock(thisnode)
|
836 |
+
else:
|
837 |
+
for nextnode in component[thisnode]:
|
838 |
+
if thisnode not in B[nextnode]: # TODO: use set for speedup?
|
839 |
+
B[nextnode].append(thisnode)
|
840 |
+
path.pop() # remove thisnode from path
|
841 |
+
return closed
|
842 |
+
|
843 |
+
path = [] # stack of nodes in current path
|
844 |
+
blocked = defaultdict(bool) # vertex: blocked from search?
|
845 |
+
B = defaultdict(list) # graph portions that yield no elementary circuit
|
846 |
+
result = [] # list to accumulate the circuits found
|
847 |
+
|
848 |
+
# Johnson's algorithm exclude self cycle edges like (v, v)
|
849 |
+
# To be backward compatible, we record those cycles in advance
|
850 |
+
# and then remove from subG
|
851 |
+
for v in G:
|
852 |
+
if G.has_edge(v, v):
|
853 |
+
result.append([v])
|
854 |
+
G.remove_edge(v, v)
|
855 |
+
|
856 |
+
# Johnson's algorithm requires some ordering of the nodes.
|
857 |
+
# They might not be sortable so we assign an arbitrary ordering.
|
858 |
+
ordering = dict(zip(G, range(len(G))))
|
859 |
+
for s in ordering:
|
860 |
+
# Build the subgraph induced by s and following nodes in the ordering
|
861 |
+
subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
|
862 |
+
# Find the strongly connected component in the subgraph
|
863 |
+
# that contains the least node according to the ordering
|
864 |
+
strongcomp = nx.strongly_connected_components(subgraph)
|
865 |
+
mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
|
866 |
+
component = G.subgraph(mincomp)
|
867 |
+
if len(component) > 1:
|
868 |
+
# smallest node in the component according to the ordering
|
869 |
+
startnode = min(component, key=ordering.__getitem__)
|
870 |
+
for node in component:
|
871 |
+
blocked[node] = False
|
872 |
+
B[node][:] = []
|
873 |
+
dummy = circuit(startnode, startnode, component)
|
874 |
+
return result
|
875 |
+
|
876 |
+
|
877 |
+
@nx._dispatchable
|
878 |
+
def find_cycle(G, source=None, orientation=None):
|
879 |
+
"""Returns a cycle found via depth-first traversal.
|
880 |
+
|
881 |
+
The cycle is a list of edges indicating the cyclic path.
|
882 |
+
Orientation of directed edges is controlled by `orientation`.
|
883 |
+
|
884 |
+
Parameters
|
885 |
+
----------
|
886 |
+
G : graph
|
887 |
+
A directed/undirected graph/multigraph.
|
888 |
+
|
889 |
+
source : node, list of nodes
|
890 |
+
The node from which the traversal begins. If None, then a source
|
891 |
+
is chosen arbitrarily and repeatedly until all edges from each node in
|
892 |
+
the graph are searched.
|
893 |
+
|
894 |
+
orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
|
895 |
+
For directed graphs and directed multigraphs, edge traversals need not
|
896 |
+
respect the original orientation of the edges.
|
897 |
+
When set to 'reverse' every edge is traversed in the reverse direction.
|
898 |
+
When set to 'ignore', every edge is treated as undirected.
|
899 |
+
When set to 'original', every edge is treated as directed.
|
900 |
+
In all three cases, the yielded edge tuples add a last entry to
|
901 |
+
indicate the direction in which that edge was traversed.
|
902 |
+
If orientation is None, the yielded edge has no direction indicated.
|
903 |
+
The direction is respected, but not reported.
|
904 |
+
|
905 |
+
Returns
|
906 |
+
-------
|
907 |
+
edges : directed edges
|
908 |
+
A list of directed edges indicating the path taken for the loop.
|
909 |
+
If no cycle is found, then an exception is raised.
|
910 |
+
For graphs, an edge is of the form `(u, v)` where `u` and `v`
|
911 |
+
are the tail and head of the edge as determined by the traversal.
|
912 |
+
For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
|
913 |
+
the key of the edge. When the graph is directed, then `u` and `v`
|
914 |
+
are always in the order of the actual directed edge.
|
915 |
+
If orientation is not None then the edge tuple is extended to include
|
916 |
+
the direction of traversal ('forward' or 'reverse') on that edge.
|
917 |
+
|
918 |
+
Raises
|
919 |
+
------
|
920 |
+
NetworkXNoCycle
|
921 |
+
If no cycle was found.
|
922 |
+
|
923 |
+
Examples
|
924 |
+
--------
|
925 |
+
In this example, we construct a DAG and find, in the first call, that there
|
926 |
+
are no directed cycles, and so an exception is raised. In the second call,
|
927 |
+
we ignore edge orientations and find that there is an undirected cycle.
|
928 |
+
Note that the second call finds a directed cycle while effectively
|
929 |
+
traversing an undirected graph, and so, we found an "undirected cycle".
|
930 |
+
This means that this DAG structure does not form a directed tree (which
|
931 |
+
is also known as a polytree).
|
932 |
+
|
933 |
+
>>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
|
934 |
+
>>> nx.find_cycle(G, orientation="original")
|
935 |
+
Traceback (most recent call last):
|
936 |
+
...
|
937 |
+
networkx.exception.NetworkXNoCycle: No cycle found.
|
938 |
+
>>> list(nx.find_cycle(G, orientation="ignore"))
|
939 |
+
[(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')]
|
940 |
+
|
941 |
+
See Also
|
942 |
+
--------
|
943 |
+
simple_cycles
|
944 |
+
"""
|
945 |
+
if not G.is_directed() or orientation in (None, "original"):
|
946 |
+
|
947 |
+
def tailhead(edge):
|
948 |
+
return edge[:2]
|
949 |
+
|
950 |
+
elif orientation == "reverse":
|
951 |
+
|
952 |
+
def tailhead(edge):
|
953 |
+
return edge[1], edge[0]
|
954 |
+
|
955 |
+
elif orientation == "ignore":
|
956 |
+
|
957 |
+
def tailhead(edge):
|
958 |
+
if edge[-1] == "reverse":
|
959 |
+
return edge[1], edge[0]
|
960 |
+
return edge[:2]
|
961 |
+
|
962 |
+
explored = set()
|
963 |
+
cycle = []
|
964 |
+
final_node = None
|
965 |
+
for start_node in G.nbunch_iter(source):
|
966 |
+
if start_node in explored:
|
967 |
+
# No loop is possible.
|
968 |
+
continue
|
969 |
+
|
970 |
+
edges = []
|
971 |
+
# All nodes seen in this iteration of edge_dfs
|
972 |
+
seen = {start_node}
|
973 |
+
# Nodes in active path.
|
974 |
+
active_nodes = {start_node}
|
975 |
+
previous_head = None
|
976 |
+
|
977 |
+
for edge in nx.edge_dfs(G, start_node, orientation):
|
978 |
+
# Determine if this edge is a continuation of the active path.
|
979 |
+
tail, head = tailhead(edge)
|
980 |
+
if head in explored:
|
981 |
+
# Then we've already explored it. No loop is possible.
|
982 |
+
continue
|
983 |
+
if previous_head is not None and tail != previous_head:
|
984 |
+
# This edge results from backtracking.
|
985 |
+
# Pop until we get a node whose head equals the current tail.
|
986 |
+
# So for example, we might have:
|
987 |
+
# (0, 1), (1, 2), (2, 3), (1, 4)
|
988 |
+
# which must become:
|
989 |
+
# (0, 1), (1, 4)
|
990 |
+
while True:
|
991 |
+
try:
|
992 |
+
popped_edge = edges.pop()
|
993 |
+
except IndexError:
|
994 |
+
edges = []
|
995 |
+
active_nodes = {tail}
|
996 |
+
break
|
997 |
+
else:
|
998 |
+
popped_head = tailhead(popped_edge)[1]
|
999 |
+
active_nodes.remove(popped_head)
|
1000 |
+
|
1001 |
+
if edges:
|
1002 |
+
last_head = tailhead(edges[-1])[1]
|
1003 |
+
if tail == last_head:
|
1004 |
+
break
|
1005 |
+
edges.append(edge)
|
1006 |
+
|
1007 |
+
if head in active_nodes:
|
1008 |
+
# We have a loop!
|
1009 |
+
cycle.extend(edges)
|
1010 |
+
final_node = head
|
1011 |
+
break
|
1012 |
+
else:
|
1013 |
+
seen.add(head)
|
1014 |
+
active_nodes.add(head)
|
1015 |
+
previous_head = head
|
1016 |
+
|
1017 |
+
if cycle:
|
1018 |
+
break
|
1019 |
+
else:
|
1020 |
+
explored.update(seen)
|
1021 |
+
|
1022 |
+
else:
|
1023 |
+
assert len(cycle) == 0
|
1024 |
+
raise nx.exception.NetworkXNoCycle("No cycle found.")
|
1025 |
+
|
1026 |
+
# We now have a list of edges which ends on a cycle.
|
1027 |
+
# So we need to remove from the beginning edges that are not relevant.
|
1028 |
+
|
1029 |
+
for i, edge in enumerate(cycle):
|
1030 |
+
tail, head = tailhead(edge)
|
1031 |
+
if tail == final_node:
|
1032 |
+
break
|
1033 |
+
|
1034 |
+
return cycle[i:]
|
1035 |
+
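# --- Illustrative sketch (not part of the module): how find_cycle reports a
# --- directed cycle. The small example graph is an assumption chosen only
# --- for this illustration.
import networkx as nx

G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (2, 3)])
# Traversal from node 0 reaches 2 and then returns to the active node 0,
# so the reported cycle is the list of traversed edges forming the loop.
print(nx.find_cycle(G, source=0, orientation=None))  # [(0, 1), (1, 2), (2, 0)]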
|
1036 |
+
|
1037 |
+
@not_implemented_for("directed")
|
1038 |
+
@not_implemented_for("multigraph")
|
1039 |
+
@nx._dispatchable(edge_attrs="weight")
|
1040 |
+
def minimum_cycle_basis(G, weight=None):
|
1041 |
+
"""Returns a minimum weight cycle basis for G
|
1042 |
+
|
1043 |
+
Minimum weight means a cycle basis for which the total weight
|
1044 |
+
(length for unweighted graphs) of all the cycles is minimum.
|
1045 |
+
|
1046 |
+
Parameters
|
1047 |
+
----------
|
1048 |
+
G : NetworkX Graph
|
1049 |
+
weight: string
|
1050 |
+
name of the edge attribute to use for edge weights
|
1051 |
+
|
1052 |
+
Returns
|
1053 |
+
-------
|
1054 |
+
A list of cycle lists. Each cycle list is a list of nodes
|
1055 |
+
which forms a cycle (loop) in G. Note that the nodes are not
|
1056 |
+
necessarily returned in the order in which they appear in the cycle
|
1057 |
+
|
1058 |
+
Examples
|
1059 |
+
--------
|
1060 |
+
>>> G = nx.Graph()
|
1061 |
+
>>> nx.add_cycle(G, [0, 1, 2, 3])
|
1062 |
+
>>> nx.add_cycle(G, [0, 3, 4, 5])
|
1063 |
+
>>> nx.minimum_cycle_basis(G)
|
1064 |
+
[[5, 4, 3, 0], [3, 2, 1, 0]]
|
1065 |
+
|
1066 |
+
References:
|
1067 |
+
[1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
|
1068 |
+
Minimum Cycle Basis of Graphs."
|
1069 |
+
http://link.springer.com/article/10.1007/s00453-007-9064-z
|
1070 |
+
[2] de Pina, J. 1995. Applications of shortest path methods.
|
1071 |
+
Ph.D. thesis, University of Amsterdam, Netherlands
|
1072 |
+
|
1073 |
+
See Also
|
1074 |
+
--------
|
1075 |
+
simple_cycles, cycle_basis
|
1076 |
+
"""
|
1077 |
+
# We first split the graph in connected subgraphs
|
1078 |
+
return sum(
|
1079 |
+
(_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)),
|
1080 |
+
[],
|
1081 |
+
)
|
1082 |
+
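# --- Illustrative sketch (not part of the module): edge weights can change
# --- which cycles form the minimum basis. The attribute name "cost" is a
# --- hypothetical choice made only for this example.
import networkx as nx

G = nx.Graph()
nx.add_cycle(G, [0, 1, 2, 3], cost=1)  # square, each edge costs 1
G.add_edge(0, 2, cost=10)              # expensive chord
# Unweighted, the two triangles (3 edges each) form the minimum basis; with
# the "cost" weights, the cheap 4-cycle (total cost 4) should replace one of
# the triangles (total cost 12 each).
print(nx.minimum_cycle_basis(G))
print(nx.minimum_cycle_basis(G, weight="cost"))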
|
1083 |
+
|
1084 |
+
def _min_cycle_basis(G, weight):
|
1085 |
+
cb = []
|
1086 |
+
# We extract the edges not in a spanning tree. We do not really need a
|
1087 |
+
# *minimum* spanning tree. That is why we call the next function with
|
1088 |
+
# weight=None. Depending on implementation, it may be faster as well
|
1089 |
+
tree_edges = list(nx.minimum_spanning_edges(G, weight=None, data=False))
|
1090 |
+
chords = G.edges - tree_edges - {(v, u) for u, v in tree_edges}
|
1091 |
+
|
1092 |
+
# We maintain a set of vectors orthogonal to the cycles found so far
|
1093 |
+
set_orth = [{edge} for edge in chords]
|
1094 |
+
while set_orth:
|
1095 |
+
base = set_orth.pop()
|
1096 |
+
# kth cycle is "parallel" to kth vector in set_orth
|
1097 |
+
cycle_edges = _min_cycle(G, base, weight)
|
1098 |
+
cb.append([v for u, v in cycle_edges])
|
1099 |
+
|
1100 |
+
# now update set_orth so that k+1,k+2... th elements are
|
1101 |
+
# orthogonal to the newly found cycle, as per [p. 336, 1]
|
1102 |
+
set_orth = [
|
1103 |
+
(
|
1104 |
+
{e for e in orth if e not in base if e[::-1] not in base}
|
1105 |
+
| {e for e in base if e not in orth if e[::-1] not in orth}
|
1106 |
+
)
|
1107 |
+
if sum((e in orth or e[::-1] in orth) for e in cycle_edges) % 2
|
1108 |
+
else orth
|
1109 |
+
for orth in set_orth
|
1110 |
+
]
|
1111 |
+
return cb
|
1112 |
+
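# --- Illustrative sketch (not part of the module): the chord set used above is
# --- simply the set of edges outside an (arbitrary) spanning tree, and its size
# --- equals the dimension of the cycle space.
import networkx as nx

G = nx.cycle_graph(4)  # 4 edges; a spanning tree uses 3 of them
tree_edges = list(nx.minimum_spanning_edges(G, weight=None, data=False))
chords = G.edges - tree_edges - {(v, u) for u, v in tree_edges}
print(chords)  # exactly one chord, so the basis contains exactly one cycle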
|
1113 |
+
|
1114 |
+
def _min_cycle(G, orth, weight):
|
1115 |
+
"""
|
1116 |
+
Computes the minimum weight cycle in G,
|
1117 |
+
orthogonal to the vector orth as per [p. 338, 1]
|
1118 |
+
Use (u, 1) to indicate the lifted copy of u (denoted u' in paper).
|
1119 |
+
"""
|
1120 |
+
Gi = nx.Graph()
|
1121 |
+
|
1122 |
+
# Add 2 copies of each edge in G to Gi.
|
1123 |
+
# If edge is in orth, add cross edge; otherwise in-plane edge
|
1124 |
+
for u, v, wt in G.edges(data=weight, default=1):
|
1125 |
+
if (u, v) in orth or (v, u) in orth:
|
1126 |
+
Gi.add_edges_from([(u, (v, 1)), ((u, 1), v)], Gi_weight=wt)
|
1127 |
+
else:
|
1128 |
+
Gi.add_edges_from([(u, v), ((u, 1), (v, 1))], Gi_weight=wt)
|
1129 |
+
|
1130 |
+
# find the shortest length in Gi between n and (n, 1) for each n
|
1131 |
+
# Note: Use "Gi_weight" for name of weight attribute
|
1132 |
+
spl = nx.shortest_path_length
|
1133 |
+
lift = {n: spl(Gi, source=n, target=(n, 1), weight="Gi_weight") for n in G}
|
1134 |
+
|
1135 |
+
# Now compute that short path in Gi, which translates to a cycle in G
|
1136 |
+
start = min(lift, key=lift.get)
|
1137 |
+
end = (start, 1)
|
1138 |
+
min_path_i = nx.shortest_path(Gi, source=start, target=end, weight="Gi_weight")
|
1139 |
+
|
1140 |
+
# Now we obtain the actual path, re-map nodes in Gi to those in G
|
1141 |
+
min_path = [n if n in G else n[0] for n in min_path_i]
|
1142 |
+
|
1143 |
+
# Now remove the edges that occur two times
|
1144 |
+
# two passes: flag which edges get kept, then build it
|
1145 |
+
edgelist = list(pairwise(min_path))
|
1146 |
+
edgeset = set()
|
1147 |
+
for e in edgelist:
|
1148 |
+
if e in edgeset:
|
1149 |
+
edgeset.remove(e)
|
1150 |
+
elif e[::-1] in edgeset:
|
1151 |
+
edgeset.remove(e[::-1])
|
1152 |
+
else:
|
1153 |
+
edgeset.add(e)
|
1154 |
+
|
1155 |
+
min_edgelist = []
|
1156 |
+
for e in edgelist:
|
1157 |
+
if e in edgeset:
|
1158 |
+
min_edgelist.append(e)
|
1159 |
+
edgeset.remove(e)
|
1160 |
+
elif e[::-1] in edgeset:
|
1161 |
+
min_edgelist.append(e[::-1])
|
1162 |
+
edgeset.remove(e[::-1])
|
1163 |
+
|
1164 |
+
return min_edgelist
|
1165 |
+
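# --- Illustrative sketch (not part of the module): the "lifted" graph built in
# --- _min_cycle. Cross edges are inserted only for edges in `orth`, so any path
# --- from a node to its lifted copy must use an odd number of `orth` edges.
# --- The `orth` set below is a hypothetical constraint chosen for this sketch.
import networkx as nx

G = nx.cycle_graph(3)
orth = {(0, 1)}
Gi = nx.Graph()
for u, v in G.edges:
    if (u, v) in orth or (v, u) in orth:
        Gi.add_edges_from([(u, (v, 1)), ((u, 1), v)])   # cross between copies
    else:
        Gi.add_edges_from([(u, v), ((u, 1), (v, 1))])   # stay within a copy
# A shortest 0 -> (0, 1) path crosses between the two copies exactly once and
# projects back to the triangle 0-1-2 in G.
print(nx.shortest_path(Gi, source=0, target=(0, 1)))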
|
1166 |
+
|
1167 |
+
@not_implemented_for("directed")
|
1168 |
+
@not_implemented_for("multigraph")
|
1169 |
+
@nx._dispatchable
|
1170 |
+
def girth(G):
|
1171 |
+
"""Returns the girth of the graph.
|
1172 |
+
|
1173 |
+
The girth of a graph is the length of its shortest cycle, or infinity if
|
1174 |
+
the graph is acyclic. The algorithm follows the description given on the
|
1175 |
+
Wikipedia page [1]_, and runs in time O(mn) on a graph with m edges and n
|
1176 |
+
nodes.
|
1177 |
+
|
1178 |
+
Parameters
|
1179 |
+
----------
|
1180 |
+
G : NetworkX Graph
|
1181 |
+
|
1182 |
+
Returns
|
1183 |
+
-------
|
1184 |
+
int or math.inf
|
1185 |
+
|
1186 |
+
Examples
|
1187 |
+
--------
|
1188 |
+
All examples below (except P_5) can easily be checked using Wikipedia,
|
1189 |
+
which has a page for each of these famous graphs.
|
1190 |
+
|
1191 |
+
>>> nx.girth(nx.chvatal_graph())
|
1192 |
+
4
|
1193 |
+
>>> nx.girth(nx.tutte_graph())
|
1194 |
+
4
|
1195 |
+
>>> nx.girth(nx.petersen_graph())
|
1196 |
+
5
|
1197 |
+
>>> nx.girth(nx.heawood_graph())
|
1198 |
+
6
|
1199 |
+
>>> nx.girth(nx.pappus_graph())
|
1200 |
+
6
|
1201 |
+
>>> nx.girth(nx.path_graph(5))
|
1202 |
+
inf
|
1203 |
+
|
1204 |
+
References
|
1205 |
+
----------
|
1206 |
+
.. [1] `Wikipedia: Girth <https://en.wikipedia.org/wiki/Girth_(graph_theory)>`_
|
1207 |
+
|
1208 |
+
"""
|
1209 |
+
girth = depth_limit = inf
|
1210 |
+
tree_edge = nx.algorithms.traversal.breadth_first_search.TREE_EDGE
|
1211 |
+
level_edge = nx.algorithms.traversal.breadth_first_search.LEVEL_EDGE
|
1212 |
+
for n in G:
|
1213 |
+
# run a BFS from source n, keeping track of distances; since we want
|
1214 |
+
# the shortest cycle, no need to explore beyond the current minimum length
|
1215 |
+
depth = {n: 0}
|
1216 |
+
for u, v, label in nx.bfs_labeled_edges(G, n):
|
1217 |
+
du = depth[u]
|
1218 |
+
if du > depth_limit:
|
1219 |
+
break
|
1220 |
+
if label is tree_edge:
|
1221 |
+
depth[v] = du + 1
|
1222 |
+
else:
|
1223 |
+
# if (u, v) is a level edge, the length is du + du + 1 (odd)
|
1224 |
+
# otherwise, it's a forward edge; length is du + (du + 1) + 1 (even)
|
1225 |
+
delta = label is level_edge
|
1226 |
+
length = du + du + 2 - delta
|
1227 |
+
if length < girth:
|
1228 |
+
girth = length
|
1229 |
+
depth_limit = du - delta
|
1230 |
+
|
1231 |
+
return girth
|
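# --- Illustrative sketch (not part of the module): the shorter of two cycles
# --- determines the girth; acyclic graphs report infinity.
import networkx as nx

G = nx.Graph()
nx.add_cycle(G, [0, 1, 2])           # triangle
nx.add_cycle(G, [3, 4, 5, 6, 7])     # 5-cycle
print(nx.girth(G))                   # 3
print(nx.girth(nx.path_graph(4)))    # inf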
venv/lib/python3.10/site-packages/networkx/algorithms/d_separation.py
ADDED
@@ -0,0 +1,722 @@
1 |
+
"""
|
2 |
+
Algorithm for testing d-separation in DAGs.
|
3 |
+
|
4 |
+
*d-separation* is a test for conditional independence in probability
|
5 |
+
distributions that can be factorized using DAGs. It is a purely
|
6 |
+
graphical test that uses the underlying graph and makes no reference
|
7 |
+
to the actual distribution parameters. See [1]_ for a formal
|
8 |
+
definition.
|
9 |
+
|
10 |
+
The implementation is based on the conceptually simple linear time
|
11 |
+
algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of
|
12 |
+
alternative algorithms.
|
13 |
+
|
14 |
+
The functional interface in NetworkX consists of three functions:
|
15 |
+
|
16 |
+
- `find_minimal_d_separator` returns a minimal d-separator set ``z``.
|
17 |
+
That is, removing any node or nodes from it makes it no longer a d-separator.
|
18 |
+
- `is_d_separator` checks if a given set is a d-separator.
|
19 |
+
- `is_minimal_d_separator` checks if a given set is a minimal d-separator.
|
20 |
+
|
21 |
+
D-separators
|
22 |
+
------------
|
23 |
+
|
24 |
+
Here, we provide a brief overview of d-separation and related concepts that
|
25 |
+
are relevant for understanding it:
|
26 |
+
|
27 |
+
The ideas of d-separation and d-connection relate to paths being open or blocked.
|
28 |
+
|
29 |
+
- A "path" is a sequence of nodes connected in order by edges. Unlike for most
|
30 |
+
graph theory analysis, the direction of the edges is ignored. Thus the path
|
31 |
+
can be thought of as a traditional path on the undirected version of the graph.
|
32 |
+
- A "candidate d-separator" ``z`` is a set of nodes being considered as
|
33 |
+
possibly blocking all paths between two prescribed sets ``x`` and ``y`` of nodes.
|
34 |
+
We refer to each node in the candidate d-separator as "known".
|
35 |
+
- A "collider" node on a path is a node that is a successor of its two neighbor
|
36 |
+
nodes on the path. That is, ``c`` is a collider if the edge directions
|
37 |
+
along the path look like ``... u -> c <- v ...``.
|
38 |
+
- If a collider node or any of its descendants are "known", the collider
|
39 |
+
is called an "open collider". Otherwise it is a "blocking collider".
|
40 |
+
- Any path can be "blocked" in two ways. If the path contains a "known" node
|
41 |
+
that is not a collider, the path is blocked. Also, if the path contains a
|
42 |
+
collider that is not a "known" node, the path is blocked.
|
43 |
+
- A path is "open" if it is not blocked. That is, it is open if every node is
|
44 |
+
either an open collider or not a "known". Said another way, every
|
45 |
+
"known" in the path is a collider and every collider is open (has a
|
46 |
+
"known" as a inclusive descendant). The concept of "open path" is meant to
|
47 |
+
demonstrate a probabilistic conditional dependence between two nodes given
|
48 |
+
prescribed knowledge ("known" nodes).
|
49 |
+
- Two sets ``x`` and ``y`` of nodes are "d-separated" by a set of nodes ``z``
|
50 |
+
if all paths between nodes in ``x`` and nodes in ``y`` are blocked. That is,
|
51 |
+
if there are no open paths from any node in ``x`` to any node in ``y``.
|
52 |
+
Such a set ``z`` is a "d-separator" of ``x`` and ``y``.
|
53 |
+
- A "minimal d-separator" is a d-separator ``z`` for which no node or subset
|
54 |
+
of nodes can be removed with it still being a d-separator.
|
55 |
+
|
56 |
+
The d-separator blocks some paths between ``x`` and ``y`` but opens others.
|
57 |
+
Nodes in the d-separator block paths if the nodes are not colliders.
|
58 |
+
But if a collider or its descendant nodes are in the d-separation set, the
|
59 |
+
colliders are open, allowing a path through that collider.
|
60 |
+
|
61 |
+
Illustration of D-separation with examples
|
62 |
+
------------------------------------------
|
63 |
+
|
64 |
+
A pair of two nodes, ``u`` and ``v``, are d-connected if there is a path
|
65 |
+
from ``u`` to ``v`` that is not blocked. That means, there is an open
|
66 |
+
path from ``u`` to ``v``.
|
67 |
+
|
68 |
+
For example, if the d-separating set is the empty set, then the following paths are
|
69 |
+
open between ``u`` and ``v``:
|
70 |
+
|
71 |
+
- u <- n -> v
|
72 |
+
- u -> w -> ... -> n -> v
|
73 |
+
|
74 |
+
If on the other hand, ``n`` is in the d-separating set, then ``n`` blocks
|
75 |
+
those paths between ``u`` and ``v``.
|
76 |
+
|
77 |
+
Colliders block a path if they and their descendants are not included
|
78 |
+
in the d-separating set. An example of a path that is blocked when the
|
79 |
+
d-separating set is empty is:
|
80 |
+
|
81 |
+
- u -> w -> ... -> n <- v
|
82 |
+
|
83 |
+
The node ``n`` is a collider in this path and is not in the d-separating set.
|
84 |
+
So ``n`` blocks this path. However, if ``n`` or a descendant of ``n`` is
|
85 |
+
included in the d-separating set, then the path through the collider
|
86 |
+
at ``n`` (... -> n <- ...) is "open".
|
87 |
+
|
88 |
+
D-separation is concerned with blocking all paths between nodes from ``x`` to ``y``.
|
89 |
+
A d-separating set between ``x`` and ``y`` is one where all paths are blocked.
|
90 |
+
|
91 |
+
D-separation and its applications in probability
|
92 |
+
------------------------------------------------
|
93 |
+
|
94 |
+
D-separation is commonly used in probabilistic causal-graph models. D-separation
|
95 |
+
connects the idea of probabilistic "dependence" with separation in a graph. If
|
96 |
+
one assumes the causal Markov condition [5]_, (every node is conditionally
|
97 |
+
independent of its non-descendants, given its parents) then d-separation implies
|
98 |
+
conditional independence in probability distributions.
|
99 |
+
Symmetrically, d-connection implies dependence.
|
100 |
+
|
101 |
+
The intuition is as follows. The edges on a causal graph indicate which nodes
|
102 |
+
influence the outcome of other nodes directly. An edge from u to v
|
103 |
+
implies that the outcome of event ``u`` influences the probabilities for
|
104 |
+
the outcome of event ``v``. Certainly knowing ``u`` changes predictions for ``v``.
|
105 |
+
But also knowing ``v`` changes predictions for ``u``. The outcomes are dependent.
|
106 |
+
Furthermore, an edge from ``v`` to ``w`` would mean that ``w`` and ``v`` are dependent
|
107 |
+
and thus that ``u`` could indirectly influence ``w``.
|
108 |
+
|
109 |
+
Without any knowledge about the system (candidate d-separating set is empty)
|
110 |
+
a causal graph ``u -> v -> w`` allows all three nodes to be dependent. But
|
111 |
+
if we know the outcome of ``v``, the conditional probabilities of outcomes for
|
112 |
+
``u`` and ``w`` are independent of each other. That is, once we know the outcome
|
113 |
+
for ``v``, the probabilities for ``w`` do not depend on the outcome for ``u``.
|
114 |
+
This is the idea behind ``v`` blocking the path if it is "known" (in the candidate
|
115 |
+
d-separating set).
|
116 |
+
|
117 |
+
The same argument works whether the direction of the edges are both
|
118 |
+
left-going and when both arrows head out from the middle. Having a "known"
|
119 |
+
node on a path blocks the collider-free path because those relationships
|
120 |
+
make the conditional probabilities independent.
|
121 |
+
|
122 |
+
The direction of the causal edges does impact dependence precisely in the
|
123 |
+
case of a collider e.g. ``u -> v <- w``. In that situation, both ``u`` and ``w``
|
124 |
+
influence ``v``. But they do not directly influence each other. So without any
|
125 |
+
knowledge of any outcomes, ``u`` and ``w`` are independent. That is the idea behind
|
126 |
+
colliders blocking the path. But, if ``v`` is known, the conditional probabilities
|
127 |
+
of ``u`` and ``w`` can be dependent. This is the heart of Berkson's Paradox [6]_.
|
128 |
+
For example, suppose ``u`` and ``w`` are boolean events (they either happen or do not)
|
129 |
+
and ``v`` represents the outcome "at least one of ``u`` and ``w`` occur". Then knowing
|
130 |
+
``v`` is true makes the conditional probabilities of ``u`` and ``w`` dependent.
|
131 |
+
Essentially, knowing that at least one of them is true raises the probability of
|
132 |
+
each. But further knowledge that ``w`` is true (or false) changes the conditional
|
133 |
+
probability of ``u`` to either the original value or 1. So the conditional
|
134 |
+
probability of ``u`` depends on the outcome of ``w`` even though there is no
|
135 |
+
causal relationship between them. When a collider is known, dependence can
|
136 |
+
occur across paths through that collider. This is the reason open colliders
|
137 |
+
do not block paths.
|
138 |
+
|
139 |
+
Furthermore, even if ``v`` is not "known", if one of its descendants is "known"
|
140 |
+
we can use that information to know more about ``v`` which again makes
|
141 |
+
``u`` and ``w`` potentially dependent. Suppose the chance of ``n`` occurring
|
142 |
+
is much higher when ``v`` occurs ("at least one of ``u`` and ``w`` occur").
|
143 |
+
Then if we know ``n`` occurred, it is more likely that ``v`` occurred and that
|
144 |
+
makes the chance of ``u`` and ``w`` dependent. This is the idea behind why
|
145 |
+
a collider does not block a path if any descendant of the collider is "known".
|
146 |
+
|
147 |
+
When two sets of nodes ``x`` and ``y`` are d-separated by a set ``z``,
|
148 |
+
it means that given the outcomes of the nodes in ``z``, the probabilities
|
149 |
+
of outcomes of the nodes in ``x`` are independent of the outcomes of the
|
150 |
+
nodes in ``y`` and vice versa.
|
151 |
+
|
152 |
+
Examples
|
153 |
+
--------
|
154 |
+
A Hidden Markov Model with 5 observed states and 5 hidden states
|
155 |
+
where the hidden states have causal relationships resulting in
|
156 |
+
a path. This results in the following causal network. We check that
|
157 |
+
early states along the path are separated from late states in
|
158 |
+
the path by the d-separator of the middle hidden state.
|
159 |
+
Thus if we condition on the middle hidden state, the early
|
160 |
+
state probabilities are independent of the late state outcomes.
|
161 |
+
|
162 |
+
>>> G = nx.DiGraph()
|
163 |
+
>>> G.add_edges_from(
|
164 |
+
... [
|
165 |
+
... ("H1", "H2"),
|
166 |
+
... ("H2", "H3"),
|
167 |
+
... ("H3", "H4"),
|
168 |
+
... ("H4", "H5"),
|
169 |
+
... ("H1", "O1"),
|
170 |
+
... ("H2", "O2"),
|
171 |
+
... ("H3", "O3"),
|
172 |
+
... ("H4", "O4"),
|
173 |
+
... ("H5", "O5"),
|
174 |
+
... ]
|
175 |
+
... )
|
176 |
+
>>> x, y, z = ({"H1", "O1"}, {"H5", "O5"}, {"H3"})
|
177 |
+
>>> nx.is_d_separator(G, x, y, z)
|
178 |
+
True
|
179 |
+
>>> nx.is_minimal_d_separator(G, x, y, z)
|
180 |
+
True
|
181 |
+
>>> nx.is_minimal_d_separator(G, x, y, z | {"O3"})
|
182 |
+
False
|
183 |
+
>>> z = nx.find_minimal_d_separator(G, x | y, {"O2", "O3", "O4"})
|
184 |
+
>>> z == {"H2", "H4"}
|
185 |
+
True
|
186 |
+
|
187 |
+
If no minimal d-separator exists, `None` is returned
|
188 |
+
|
189 |
+
>>> other_z = nx.find_minimal_d_separator(G, x | y, {"H2", "H3"})
|
190 |
+
>>> other_z is None
|
191 |
+
True
|
192 |
+
|
193 |
+
|
194 |
+
References
|
195 |
+
----------
|
196 |
+
|
197 |
+
.. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press.
|
198 |
+
|
199 |
+
.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks.
|
200 |
+
Cambridge: Cambridge University Press.
|
201 |
+
|
202 |
+
.. [3] Shachter, Ross D. "Bayes-ball: The rational pastime (for
|
203 |
+
determining irrelevance and requisite information in belief networks
|
204 |
+
and influence diagrams)." In Proceedings of the Fourteenth Conference
|
205 |
+
on Uncertainty in Artificial Intelligence (UAI), (pp. 480–487). 1998.
|
206 |
+
|
207 |
+
.. [4] Koller, D., & Friedman, N. (2009).
|
208 |
+
Probabilistic graphical models: principles and techniques. The MIT Press.
|
209 |
+
|
210 |
+
.. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition
|
211 |
+
|
212 |
+
.. [6] https://en.wikipedia.org/wiki/Berkson%27s_paradox
|
213 |
+
|
214 |
+
"""
|
215 |
+
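# --- Illustrative sketch (not part of the module): the collider rule described
# --- above. With an empty conditioning set, the path u -> n <- v is blocked;
# --- conditioning on the collider n, or on its descendant d, opens it.
import networkx as nx

G = nx.DiGraph([("u", "n"), ("v", "n"), ("n", "d")])
print(nx.is_d_separator(G, {"u"}, {"v"}, set()))   # True: the collider blocks
print(nx.is_d_separator(G, {"u"}, {"v"}, {"n"}))   # False: the collider is open
print(nx.is_d_separator(G, {"u"}, {"v"}, {"d"}))   # False: opened via descendant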
|
216 |
+
from collections import deque
|
217 |
+
from itertools import chain
|
218 |
+
|
219 |
+
import networkx as nx
|
220 |
+
from networkx.utils import UnionFind, not_implemented_for
|
221 |
+
|
222 |
+
__all__ = [
|
223 |
+
"is_d_separator",
|
224 |
+
"is_minimal_d_separator",
|
225 |
+
"find_minimal_d_separator",
|
226 |
+
"d_separated",
|
227 |
+
"minimal_d_separator",
|
228 |
+
]
|
229 |
+
|
230 |
+
|
231 |
+
@not_implemented_for("undirected")
|
232 |
+
@nx._dispatchable
|
233 |
+
def is_d_separator(G, x, y, z):
|
234 |
+
"""Return whether node sets `x` and `y` are d-separated by `z`.
|
235 |
+
|
236 |
+
Parameters
|
237 |
+
----------
|
238 |
+
G : nx.DiGraph
|
239 |
+
A NetworkX DAG.
|
240 |
+
|
241 |
+
x : node or set of nodes
|
242 |
+
First node or set of nodes in `G`.
|
243 |
+
|
244 |
+
y : node or set of nodes
|
245 |
+
Second node or set of nodes in `G`.
|
246 |
+
|
247 |
+
z : node or set of nodes
|
248 |
+
Potential separator (set of conditioning nodes in `G`). Can be empty set.
|
249 |
+
|
250 |
+
Returns
|
251 |
+
-------
|
252 |
+
b : bool
|
253 |
+
A boolean that is true if `x` is d-separated from `y` given `z` in `G`.
|
254 |
+
|
255 |
+
Raises
|
256 |
+
------
|
257 |
+
NetworkXError
|
258 |
+
The *d-separation* test is commonly used on disjoint sets of
|
259 |
+
nodes in acyclic directed graphs. Accordingly, the algorithm
|
260 |
+
raises a :exc:`NetworkXError` if the node sets are not
|
261 |
+
disjoint or if the input graph is not a DAG.
|
262 |
+
|
263 |
+
NodeNotFound
|
264 |
+
If any of the input nodes are not found in the graph,
|
265 |
+
a :exc:`NodeNotFound` exception is raised
|
266 |
+
|
267 |
+
Notes
|
268 |
+
-----
|
269 |
+
A d-separating set in a DAG is a set of nodes that
|
270 |
+
blocks all paths between the two sets. Nodes in `z`
|
271 |
+
block a path if they are part of the path and are not a collider,
|
272 |
+
or a descendant of a collider. Also colliders that are not in `z`
|
273 |
+
block a path. A collider structure along a path
|
274 |
+
is ``... -> c <- ...`` where ``c`` is the collider node.
|
275 |
+
|
276 |
+
https://en.wikipedia.org/wiki/Bayesian_network#d-separation
|
277 |
+
"""
|
278 |
+
try:
|
279 |
+
x = {x} if x in G else x
|
280 |
+
y = {y} if y in G else y
|
281 |
+
z = {z} if z in G else z
|
282 |
+
|
283 |
+
intersection = x & y or x & z or y & z
|
284 |
+
if intersection:
|
285 |
+
raise nx.NetworkXError(
|
286 |
+
f"The sets are not disjoint, with intersection {intersection}"
|
287 |
+
)
|
288 |
+
|
289 |
+
set_v = x | y | z
|
290 |
+
if set_v - G.nodes:
|
291 |
+
raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are not found in G")
|
292 |
+
except TypeError:
|
293 |
+
raise nx.NodeNotFound("One of x, y, or z is not a node or a set of nodes in G")
|
294 |
+
|
295 |
+
if not nx.is_directed_acyclic_graph(G):
|
296 |
+
raise nx.NetworkXError("graph should be directed acyclic")
|
297 |
+
|
298 |
+
# contains -> and <-> edges from starting node T
|
299 |
+
forward_deque = deque([])
|
300 |
+
forward_visited = set()
|
301 |
+
|
302 |
+
# contains <- and - edges from starting node T
|
303 |
+
backward_deque = deque(x)
|
304 |
+
backward_visited = set()
|
305 |
+
|
306 |
+
ancestors_or_z = set().union(*[nx.ancestors(G, node) for node in x]) | z | x
|
307 |
+
|
308 |
+
while forward_deque or backward_deque:
|
309 |
+
if backward_deque:
|
310 |
+
node = backward_deque.popleft()
|
311 |
+
backward_visited.add(node)
|
312 |
+
if node in y:
|
313 |
+
return False
|
314 |
+
if node in z:
|
315 |
+
continue
|
316 |
+
|
317 |
+
# add <- edges to backward deque
|
318 |
+
backward_deque.extend(G.pred[node].keys() - backward_visited)
|
319 |
+
# add -> edges to forward deque
|
320 |
+
forward_deque.extend(G.succ[node].keys() - forward_visited)
|
321 |
+
|
322 |
+
if forward_deque:
|
323 |
+
node = forward_deque.popleft()
|
324 |
+
forward_visited.add(node)
|
325 |
+
if node in y:
|
326 |
+
return False
|
327 |
+
|
328 |
+
# Consider if -> node <- is opened due to ancestor of node in z
|
329 |
+
if node in ancestors_or_z:
|
330 |
+
# add <- edges to backward deque
|
331 |
+
backward_deque.extend(G.pred[node].keys() - backward_visited)
|
332 |
+
if node not in z:
|
333 |
+
# add -> edges to forward deque
|
334 |
+
forward_deque.extend(G.succ[node].keys() - forward_visited)
|
335 |
+
|
336 |
+
return True
|
337 |
+
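# --- Illustrative sketch (not part of the module): the chain case from the
# --- module docstring. The only path u -> v -> w is open when nothing is
# --- conditioned on and blocked once the middle node is "known".
import networkx as nx

chain = nx.DiGraph([("u", "v"), ("v", "w")])
print(nx.is_d_separator(chain, {"u"}, {"w"}, set()))  # False
print(nx.is_d_separator(chain, {"u"}, {"w"}, {"v"}))  # True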
|
338 |
+
|
339 |
+
@not_implemented_for("undirected")
|
340 |
+
@nx._dispatchable
|
341 |
+
def find_minimal_d_separator(G, x, y, *, included=None, restricted=None):
|
342 |
+
"""Returns a minimal d-separating set between `x` and `y` if possible
|
343 |
+
|
344 |
+
A d-separating set in a DAG is a set of nodes that blocks all
|
345 |
+
paths between the two sets of nodes, `x` and `y`. This function
|
346 |
+
constructs a d-separating set that is "minimal", meaning no nodes can
|
347 |
+
be removed without it losing the d-separating property for `x` and `y`.
|
348 |
+
If no d-separating sets exist for `x` and `y`, this returns `None`.
|
349 |
+
|
350 |
+
In a DAG there may be more than one minimal d-separator between two
|
351 |
+
sets of nodes. Minimal d-separators are not always unique. This function
|
352 |
+
returns one minimal d-separator, or `None` if no d-separator exists.
|
353 |
+
|
354 |
+
Uses the algorithm presented in [1]_. The complexity of the algorithm
|
355 |
+
is :math:`O(m)`, where :math:`m` stands for the number of edges in
|
356 |
+
the subgraph of G consisting of only the ancestors of `x` and `y`.
|
357 |
+
For full details, see [1]_.
|
358 |
+
|
359 |
+
Parameters
|
360 |
+
----------
|
361 |
+
G : graph
|
362 |
+
A networkx DAG.
|
363 |
+
x : set | node
|
364 |
+
A node or set of nodes in the graph.
|
365 |
+
y : set | node
|
366 |
+
A node or set of nodes in the graph.
|
367 |
+
included : set | node | None
|
368 |
+
A node or set of nodes which must be included in the found separating set,
|
369 |
+
default is None, which means the empty set.
|
370 |
+
restricted : set | node | None
|
371 |
+
Restricted node or set of nodes to consider. Only these nodes can be in
|
372 |
+
the found separating set, default is None meaning all nodes in ``G``.
|
373 |
+
|
374 |
+
Returns
|
375 |
+
-------
|
376 |
+
z : set | None
|
377 |
+
The minimal d-separating set, if at least one d-separating set exists,
|
378 |
+
otherwise None.
|
379 |
+
|
380 |
+
Raises
|
381 |
+
------
|
382 |
+
NetworkXError
|
383 |
+
Raises a :exc:`NetworkXError` if the input graph is not a DAG
|
384 |
+
or if node sets `x`, `y`, and `included` are not disjoint.
|
385 |
+
|
386 |
+
NodeNotFound
|
387 |
+
If any of the input nodes are not found in the graph,
|
388 |
+
a :exc:`NodeNotFound` exception is raised.
|
389 |
+
|
390 |
+
References
|
391 |
+
----------
|
392 |
+
.. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
|
393 |
+
minimal d-separators in linear time and applications." In
|
394 |
+
Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
|
395 |
+
"""
|
396 |
+
if not nx.is_directed_acyclic_graph(G):
|
397 |
+
raise nx.NetworkXError("graph should be directed acyclic")
|
398 |
+
|
399 |
+
try:
|
400 |
+
x = {x} if x in G else x
|
401 |
+
y = {y} if y in G else y
|
402 |
+
|
403 |
+
if included is None:
|
404 |
+
included = set()
|
405 |
+
elif included in G:
|
406 |
+
included = {included}
|
407 |
+
|
408 |
+
if restricted is None:
|
409 |
+
restricted = set(G)
|
410 |
+
elif restricted in G:
|
411 |
+
restricted = {restricted}
|
412 |
+
|
413 |
+
set_y = x | y | included | restricted
|
414 |
+
if set_y - G.nodes:
|
415 |
+
raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G")
|
416 |
+
except TypeError:
|
417 |
+
raise nx.NodeNotFound(
|
418 |
+
"One of x, y, included or restricted is not a node or set of nodes in G"
|
419 |
+
)
|
420 |
+
|
421 |
+
if not included <= restricted:
|
422 |
+
raise nx.NetworkXError(
|
423 |
+
f"Included nodes {included} must be in restricted nodes {restricted}"
|
424 |
+
)
|
425 |
+
|
426 |
+
intersection = x & y or x & included or y & included
|
427 |
+
if intersection:
|
428 |
+
raise nx.NetworkXError(
|
429 |
+
f"The sets x, y, included are not disjoint. Overlap: {intersection}"
|
430 |
+
)
|
431 |
+
|
432 |
+
nodeset = x | y | included
|
433 |
+
ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, node) for node in nodeset])
|
434 |
+
|
435 |
+
z_init = restricted & (ancestors_x_y_included - (x | y))
|
436 |
+
|
437 |
+
x_closure = _reachable(G, x, ancestors_x_y_included, z_init)
|
438 |
+
if x_closure & y:
|
439 |
+
return None
|
440 |
+
|
441 |
+
z_updated = z_init & (x_closure | included)
|
442 |
+
y_closure = _reachable(G, y, ancestors_x_y_included, z_updated)
|
443 |
+
return z_updated & (y_closure | included)
|
444 |
+
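# --- Illustrative sketch (not part of the module): both parallel chains from
# --- "a" to "b" must be blocked, so the minimal separator contains both middle
# --- nodes. The example graph is an assumption made only for this sketch.
import networkx as nx

G = nx.DiGraph([("a", "m"), ("m", "b"), ("a", "c"), ("c", "b")])
print(nx.find_minimal_d_separator(G, {"a"}, {"b"}))  # {'m', 'c'} (set order may vary)
print(nx.find_minimal_d_separator(G, {"a"}, {"b"}, restricted={"m"}))  # None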
|
445 |
+
|
446 |
+
@not_implemented_for("undirected")
|
447 |
+
@nx._dispatchable
|
448 |
+
def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None):
|
449 |
+
"""Determine if `z` is a minimal d-separator for `x` and `y`.
|
450 |
+
|
451 |
+
A d-separator, `z`, in a DAG is a set of nodes that blocks
|
452 |
+
all paths from nodes in set `x` to nodes in set `y`.
|
453 |
+
A minimal d-separator is a d-separator `z` such that removing
|
454 |
+
any subset of nodes makes it no longer a d-separator.
|
455 |
+
|
456 |
+
Note: This function checks whether `z` is a d-separator AND is
|
457 |
+
minimal. One can use the function `is_d_separator` to only check if
|
458 |
+
`z` is a d-separator. See examples below.
|
459 |
+
|
460 |
+
Parameters
|
461 |
+
----------
|
462 |
+
G : nx.DiGraph
|
463 |
+
A NetworkX DAG.
|
464 |
+
x : node | set
|
465 |
+
A node or set of nodes in the graph.
|
466 |
+
y : node | set
|
467 |
+
A node or set of nodes in the graph.
|
468 |
+
z : node | set
|
469 |
+
The node or set of nodes to check if it is a minimal d-separating set.
|
470 |
+
The function :func:`is_d_separator` is called inside this function
|
471 |
+
to verify that `z` is in fact a d-separator.
|
472 |
+
included : set | node | None
|
473 |
+
A node or set of nodes which must be included in the found separating set,
|
474 |
+
default is ``None``, which means the empty set.
|
475 |
+
restricted : set | node | None
|
476 |
+
Restricted node or set of nodes to consider. Only these nodes can be in
|
477 |
+
the found separating set, default is ``None`` meaning all nodes in ``G``.
|
478 |
+
|
479 |
+
Returns
|
480 |
+
-------
|
481 |
+
bool
|
482 |
+
Whether or not the set `z` is a minimal d-separator subject to
|
483 |
+
`restricted` nodes and `included` node constraints.
|
484 |
+
|
485 |
+
Examples
|
486 |
+
--------
|
487 |
+
>>> G = nx.path_graph([0, 1, 2, 3], create_using=nx.DiGraph)
|
488 |
+
>>> G.add_node(4)
|
489 |
+
>>> nx.is_minimal_d_separator(G, 0, 2, {1})
|
490 |
+
True
|
491 |
+
>>> # since {1} is the minimal d-separator, {1, 3, 4} is not minimal
|
492 |
+
>>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4})
|
493 |
+
False
|
494 |
+
>>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator
|
495 |
+
>>> nx.is_d_separator(G, 0, 2, {1, 3, 4})
|
496 |
+
True
|
497 |
+
|
498 |
+
Raises
|
499 |
+
------
|
500 |
+
NetworkXError
|
501 |
+
Raises a :exc:`NetworkXError` if the input graph is not a DAG.
|
502 |
+
|
503 |
+
NodeNotFound
|
504 |
+
If any of the input nodes are not found in the graph,
|
505 |
+
a :exc:`NodeNotFound` exception is raised.
|
506 |
+
|
507 |
+
References
|
508 |
+
----------
|
509 |
+
.. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
|
510 |
+
minimal d-separators in linear time and applications." In
|
511 |
+
Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
|
512 |
+
|
513 |
+
Notes
|
514 |
+
-----
|
515 |
+
This function works by verifying that a set is minimal and
|
516 |
+
d-separating between two nodes. Uses criterion (a), (b), (c) on
|
517 |
+
page 4 of [1]_. a) closure(`x`) and `y` are disjoint. b) `z` contains
|
518 |
+
all nodes from `included` and is contained in the `restricted`
|
519 |
+
nodes and in the union of ancestors of `x`, `y`, and `included`.
|
520 |
+
c) the nodes in `z` not in `included` are contained in both
|
521 |
+
closure(x) and closure(y). The closure of a set is the set of nodes
|
522 |
+
connected to the set by a directed path in G.
|
523 |
+
|
524 |
+
The complexity is :math:`O(m)`, where :math:`m` stands for the
|
525 |
+
number of edges in the subgraph of G consisting of only the
|
526 |
+
ancestors of `x` and `y`.
|
527 |
+
|
528 |
+
For full details, see [1]_.
|
529 |
+
"""
|
530 |
+
if not nx.is_directed_acyclic_graph(G):
|
531 |
+
raise nx.NetworkXError("graph should be directed acyclic")
|
532 |
+
|
533 |
+
try:
|
534 |
+
x = {x} if x in G else x
|
535 |
+
y = {y} if y in G else y
|
536 |
+
z = {z} if z in G else z
|
537 |
+
|
538 |
+
if included is None:
|
539 |
+
included = set()
|
540 |
+
elif included in G:
|
541 |
+
included = {included}
|
542 |
+
|
543 |
+
if restricted is None:
|
544 |
+
restricted = set(G)
|
545 |
+
elif restricted in G:
|
546 |
+
restricted = {restricted}
|
547 |
+
|
548 |
+
set_y = x | y | included | restricted
|
549 |
+
if set_y - G.nodes:
|
550 |
+
raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G")
|
551 |
+
except TypeError:
|
552 |
+
raise nx.NodeNotFound(
|
553 |
+
"One of x, y, z, included or restricted is not a node or set of nodes in G"
|
554 |
+
)
|
555 |
+
|
556 |
+
if not included <= z:
|
557 |
+
raise nx.NetworkXError(
|
558 |
+
f"Included nodes {included} must be in proposed separating set z {x}"
|
559 |
+
)
|
560 |
+
if not z <= restricted:
|
561 |
+
raise nx.NetworkXError(
|
562 |
+
f"Separating set {z} must be contained in restricted set {restricted}"
|
563 |
+
)
|
564 |
+
|
565 |
+
intersection = x.intersection(y) or x.intersection(z) or y.intersection(z)
|
566 |
+
if intersection:
|
567 |
+
raise nx.NetworkXError(
|
568 |
+
f"The sets are not disjoint, with intersection {intersection}"
|
569 |
+
)
|
570 |
+
|
571 |
+
nodeset = x | y | included
|
572 |
+
ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, n) for n in nodeset])
|
573 |
+
|
574 |
+
# criterion (a) -- check that z is actually a separator
|
575 |
+
x_closure = _reachable(G, x, ancestors_x_y_included, z)
|
576 |
+
if x_closure & y:
|
577 |
+
return False
|
578 |
+
|
579 |
+
# criterion (b) -- basic constraint; included and restricted already checked above
|
580 |
+
if not (z <= ancestors_x_y_included):
|
581 |
+
return False
|
582 |
+
|
583 |
+
# criterion (c) -- check that z is minimal
|
584 |
+
y_closure = _reachable(G, y, ancestors_x_y_included, z)
|
585 |
+
if not ((z - included) <= (x_closure & y_closure)):
|
586 |
+
return False
|
587 |
+
return True
|
588 |
+
|
589 |
+
|
590 |
+
@not_implemented_for("undirected")
|
591 |
+
def _reachable(G, x, a, z):
|
592 |
+
"""Modified Bayes-Ball algorithm for finding d-connected nodes.
|
593 |
+
|
594 |
+
Find all nodes in `a` that are d-connected to those in `x` by
|
595 |
+
those in `z`. This is an implementation of the function
|
596 |
+
`REACHABLE` in [1]_ (which is itself a modification of the
|
597 |
+
Bayes-Ball algorithm [2]_) when restricted to DAGs.
|
598 |
+
|
599 |
+
Parameters
|
600 |
+
----------
|
601 |
+
G : nx.DiGraph
|
602 |
+
A NetworkX DAG.
|
603 |
+
x : node | set
|
604 |
+
A node in the DAG, or a set of nodes.
|
605 |
+
a : node | set
|
606 |
+
A (set of) node(s) in the DAG containing the ancestors of `x`.
|
607 |
+
z : node | set
|
608 |
+
The node or set of nodes conditioned on when checking d-connectedness.
|
609 |
+
|
610 |
+
Returns
|
611 |
+
-------
|
612 |
+
w : set
|
613 |
+
The closure of `x` in `a` with respect to d-connectedness
|
614 |
+
given `z`.
|
615 |
+
|
616 |
+
References
|
617 |
+
----------
|
618 |
+
.. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
|
619 |
+
minimal d-separators in linear time and applications." In
|
620 |
+
Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
|
621 |
+
|
622 |
+
.. [2] Shachter, Ross D. "Bayes-ball: The rational pastime
|
623 |
+
(for determining irrelevance and requisite information in
|
624 |
+
belief networks and influence diagrams)." In Proceedings of the
|
625 |
+
Fourteenth Conference on Uncertainty in Artificial Intelligence
|
626 |
+
(UAI), (pp. 480–487). 1998.
|
627 |
+
"""
|
628 |
+
|
629 |
+
def _pass(e, v, f, n):
|
630 |
+
"""Whether a ball entering node `v` along edge `e` passes to `n` along `f`.
|
631 |
+
|
632 |
+
Boolean function defined on page 6 of [1]_.
|
633 |
+
|
634 |
+
Parameters
|
635 |
+
----------
|
636 |
+
e : bool
|
637 |
+
Directed edge by which the ball got to node `v`; `True` iff directed into `v`.
|
638 |
+
v : node
|
639 |
+
Node where the ball is.
|
640 |
+
f : bool
|
641 |
+
Directed edge connecting nodes `v` and `n`; `True` iff directed into `n`.
|
642 |
+
n : node
|
643 |
+
Checking whether the ball passes to this node.
|
644 |
+
|
645 |
+
Returns
|
646 |
+
-------
|
647 |
+
b : bool
|
648 |
+
Whether the ball passes or not.
|
649 |
+
|
650 |
+
References
|
651 |
+
----------
|
652 |
+
.. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
|
653 |
+
minimal d-separators in linear time and applications." In
|
654 |
+
Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
|
655 |
+
"""
|
656 |
+
is_element_of_A = n in a
|
657 |
+
# almost_definite_status = True # always true for DAGs; not so for RCGs
|
658 |
+
collider_if_in_Z = v not in z or (e and not f)
|
659 |
+
return is_element_of_A and collider_if_in_Z # and almost_definite_status
|
660 |
+
|
661 |
+
queue = deque([])
|
662 |
+
for node in x:
|
663 |
+
if bool(G.pred[node]):
|
664 |
+
queue.append((True, node))
|
665 |
+
if bool(G.succ[node]):
|
666 |
+
queue.append((False, node))
|
667 |
+
processed = queue.copy()
|
668 |
+
|
669 |
+
while any(queue):
|
670 |
+
e, v = queue.popleft()
|
671 |
+
preds = ((False, n) for n in G.pred[v])
|
672 |
+
succs = ((True, n) for n in G.succ[v])
|
673 |
+
f_n_pairs = chain(preds, succs)
|
674 |
+
for f, n in f_n_pairs:
|
675 |
+
if (f, n) not in processed and _pass(e, v, f, n):
|
676 |
+
queue.append((f, n))
|
677 |
+
processed.append((f, n))
|
678 |
+
|
679 |
+
return {w for (_, w) in processed}
|
680 |
+
|
681 |
+
|
682 |
+
# Deprecated functions:
|
683 |
+
def d_separated(G, x, y, z):
|
684 |
+
"""Return whether nodes sets ``x`` and ``y`` are d-separated by ``z``.
|
685 |
+
|
686 |
+
.. deprecated:: 3.3
|
687 |
+
|
688 |
+
This function is deprecated and will be removed in NetworkX v3.5.
|
689 |
+
Please use `is_d_separator(G, x, y, z)`.
|
690 |
+
|
691 |
+
"""
|
692 |
+
import warnings
|
693 |
+
|
694 |
+
warnings.warn(
|
695 |
+
"d_separated is deprecated and will be removed in NetworkX v3.5."
|
696 |
+
"Please use `is_d_separator(G, x, y, z)`.",
|
697 |
+
category=DeprecationWarning,
|
698 |
+
stacklevel=2,
|
699 |
+
)
|
700 |
+
return nx.is_d_separator(G, x, y, z)
|
701 |
+
|
702 |
+
|
703 |
+
def minimal_d_separator(G, u, v):
|
704 |
+
"""Returns a minimal_d-separating set between `x` and `y` if possible
|
705 |
+
|
706 |
+
.. deprecated:: 3.3
|
707 |
+
|
708 |
+
minimal_d_separator is deprecated and will be removed in NetworkX v3.5.
|
709 |
+
Please use `find_minimal_d_separator(G, x, y)`.
|
710 |
+
|
711 |
+
"""
|
712 |
+
import warnings
|
713 |
+
|
714 |
+
warnings.warn(
|
715 |
+
(
|
716 |
+
"This function is deprecated and will be removed in NetworkX v3.5."
|
717 |
+
"Please use `is_d_separator(G, x, y)`."
|
718 |
+
),
|
719 |
+
category=DeprecationWarning,
|
720 |
+
stacklevel=2,
|
721 |
+
)
|
722 |
+
return nx.find_minimal_d_separator(G, u, v)
|
venv/lib/python3.10/site-packages/networkx/algorithms/dag.py
ADDED
@@ -0,0 +1,1259 @@
1 |
+
"""Algorithms for directed acyclic graphs (DAGs).
|
2 |
+
|
3 |
+
Note that most of these functions are only guaranteed to work for DAGs.
|
4 |
+
In general, these functions do not check for acyclic-ness, so it is up
|
5 |
+
to the user to check for that.
|
6 |
+
"""
|
7 |
+
|
8 |
+
import heapq
|
9 |
+
from collections import deque
|
10 |
+
from functools import partial
|
11 |
+
from itertools import chain, combinations, product, starmap
|
12 |
+
from math import gcd
|
13 |
+
|
14 |
+
import networkx as nx
|
15 |
+
from networkx.utils import arbitrary_element, not_implemented_for, pairwise
|
16 |
+
|
17 |
+
__all__ = [
|
18 |
+
"descendants",
|
19 |
+
"ancestors",
|
20 |
+
"topological_sort",
|
21 |
+
"lexicographical_topological_sort",
|
22 |
+
"all_topological_sorts",
|
23 |
+
"topological_generations",
|
24 |
+
"is_directed_acyclic_graph",
|
25 |
+
"is_aperiodic",
|
26 |
+
"transitive_closure",
|
27 |
+
"transitive_closure_dag",
|
28 |
+
"transitive_reduction",
|
29 |
+
"antichains",
|
30 |
+
"dag_longest_path",
|
31 |
+
"dag_longest_path_length",
|
32 |
+
"dag_to_branching",
|
33 |
+
"compute_v_structures",
|
34 |
+
]
|
35 |
+
|
36 |
+
chaini = chain.from_iterable
|
37 |
+
|
38 |
+
|
39 |
+
@nx._dispatchable
|
40 |
+
def descendants(G, source):
|
41 |
+
"""Returns all nodes reachable from `source` in `G`.
|
42 |
+
|
43 |
+
Parameters
|
44 |
+
----------
|
45 |
+
G : NetworkX Graph
|
46 |
+
source : node in `G`
|
47 |
+
|
48 |
+
Returns
|
49 |
+
-------
|
50 |
+
set()
|
51 |
+
The descendants of `source` in `G`
|
52 |
+
|
53 |
+
Raises
|
54 |
+
------
|
55 |
+
NetworkXError
|
56 |
+
If node `source` is not in `G`.
|
57 |
+
|
58 |
+
Examples
|
59 |
+
--------
|
60 |
+
>>> DG = nx.path_graph(5, create_using=nx.DiGraph)
|
61 |
+
>>> sorted(nx.descendants(DG, 2))
|
62 |
+
[3, 4]
|
63 |
+
|
64 |
+
The `source` node is not a descendant of itself, but can be included manually:
|
65 |
+
|
66 |
+
>>> sorted(nx.descendants(DG, 2) | {2})
|
67 |
+
[2, 3, 4]
|
68 |
+
|
69 |
+
See also
|
70 |
+
--------
|
71 |
+
ancestors
|
72 |
+
"""
|
73 |
+
return {child for parent, child in nx.bfs_edges(G, source)}
|
74 |
+
|
75 |
+
|
76 |
+
@nx._dispatchable
|
77 |
+
def ancestors(G, source):
|
78 |
+
"""Returns all nodes having a path to `source` in `G`.
|
79 |
+
|
80 |
+
Parameters
|
81 |
+
----------
|
82 |
+
G : NetworkX Graph
|
83 |
+
source : node in `G`
|
84 |
+
|
85 |
+
Returns
|
86 |
+
-------
|
87 |
+
set()
|
88 |
+
The ancestors of `source` in `G`
|
89 |
+
|
90 |
+
Raises
|
91 |
+
------
|
92 |
+
NetworkXError
|
93 |
+
If node `source` is not in `G`.
|
94 |
+
|
95 |
+
Examples
|
96 |
+
--------
|
97 |
+
>>> DG = nx.path_graph(5, create_using=nx.DiGraph)
|
98 |
+
>>> sorted(nx.ancestors(DG, 2))
|
99 |
+
[0, 1]
|
100 |
+
|
101 |
+
The `source` node is not an ancestor of itself, but can be included manually:
|
102 |
+
|
103 |
+
>>> sorted(nx.ancestors(DG, 2) | {2})
|
104 |
+
[0, 1, 2]
|
105 |
+
|
106 |
+
See also
|
107 |
+
--------
|
108 |
+
descendants
|
109 |
+
"""
|
110 |
+
return {child for parent, child in nx.bfs_edges(G, source, reverse=True)}
|
111 |
+
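# --- Illustrative sketch (not part of the module): descendants and ancestors
# --- are complementary reachability queries on the same DAG.
import networkx as nx

DG = nx.DiGraph([(0, 1), (1, 2), (1, 3)])
print(sorted(nx.descendants(DG, 1)))  # [2, 3]
print(sorted(nx.ancestors(DG, 2)))    # [0, 1]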
|
112 |
+
|
113 |
+
@nx._dispatchable
|
114 |
+
def has_cycle(G):
|
115 |
+
"""Decides whether the directed graph has a cycle."""
|
116 |
+
try:
|
117 |
+
# Feed the entire iterator into a zero-length deque.
|
118 |
+
deque(topological_sort(G), maxlen=0)
|
119 |
+
except nx.NetworkXUnfeasible:
|
120 |
+
return True
|
121 |
+
else:
|
122 |
+
return False
|
123 |
+
|
124 |
+
|
125 |
+
@nx._dispatchable
|
126 |
+
def is_directed_acyclic_graph(G):
|
127 |
+
"""Returns True if the graph `G` is a directed acyclic graph (DAG) or
|
128 |
+
False if not.
|
129 |
+
|
130 |
+
Parameters
|
131 |
+
----------
|
132 |
+
G : NetworkX graph
|
133 |
+
|
134 |
+
Returns
|
135 |
+
-------
|
136 |
+
bool
|
137 |
+
True if `G` is a DAG, False otherwise
|
138 |
+
|
139 |
+
Examples
|
140 |
+
--------
|
141 |
+
Undirected graph::
|
142 |
+
|
143 |
+
>>> G = nx.Graph([(1, 2), (2, 3)])
|
144 |
+
>>> nx.is_directed_acyclic_graph(G)
|
145 |
+
False
|
146 |
+
|
147 |
+
Directed graph with cycle::
|
148 |
+
|
149 |
+
>>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
|
150 |
+
>>> nx.is_directed_acyclic_graph(G)
|
151 |
+
False
|
152 |
+
|
153 |
+
Directed acyclic graph::
|
154 |
+
|
155 |
+
>>> G = nx.DiGraph([(1, 2), (2, 3)])
|
156 |
+
>>> nx.is_directed_acyclic_graph(G)
|
157 |
+
True
|
158 |
+
|
159 |
+
See also
|
160 |
+
--------
|
161 |
+
topological_sort
|
162 |
+
"""
|
163 |
+
return G.is_directed() and not has_cycle(G)
|
164 |
+
|
165 |
+
|
166 |
+
@nx._dispatchable
|
167 |
+
def topological_generations(G):
|
168 |
+
"""Stratifies a DAG into generations.
|
169 |
+
|
170 |
+
A topological generation is a node collection in which ancestors of a node in each
|
171 |
+
generation are guaranteed to be in a previous generation, and any descendants of
|
172 |
+
a node are guaranteed to be in a following generation. Nodes are guaranteed to
|
173 |
+
be in the earliest possible generation that they can belong to.
|
174 |
+
|
175 |
+
Parameters
|
176 |
+
----------
|
177 |
+
G : NetworkX digraph
|
178 |
+
A directed acyclic graph (DAG)
|
179 |
+
|
180 |
+
Yields
|
181 |
+
------
|
182 |
+
sets of nodes
|
183 |
+
Yields sets of nodes representing each generation.
|
184 |
+
|
185 |
+
Raises
|
186 |
+
------
|
187 |
+
NetworkXError
|
188 |
+
Generations are defined for directed graphs only. If the graph
|
189 |
+
`G` is undirected, a :exc:`NetworkXError` is raised.
|
190 |
+
|
191 |
+
NetworkXUnfeasible
|
192 |
+
If `G` is not a directed acyclic graph (DAG) no topological generations
|
193 |
+
exist and a :exc:`NetworkXUnfeasible` exception is raised. This can also
|
194 |
+
be raised if `G` is changed while the returned iterator is being processed
|
195 |
+
|
196 |
+
RuntimeError
|
197 |
+
If `G` is changed while the returned iterator is being processed.
|
198 |
+
|
199 |
+
Examples
|
200 |
+
--------
|
201 |
+
>>> DG = nx.DiGraph([(2, 1), (3, 1)])
|
202 |
+
>>> [sorted(generation) for generation in nx.topological_generations(DG)]
|
203 |
+
[[2, 3], [1]]
|
204 |
+
|
205 |
+
Notes
|
206 |
+
-----
|
207 |
+
The generation in which a node resides can also be determined by taking the
|
208 |
+
max-path-distance from the node to the farthest leaf node. That value can
|
209 |
+
be obtained with this function using `enumerate(topological_generations(G))`.
|
210 |
+
|
211 |
+
See also
|
212 |
+
--------
|
213 |
+
topological_sort
|
214 |
+
"""
|
215 |
+
if not G.is_directed():
|
216 |
+
raise nx.NetworkXError("Topological sort not defined on undirected graphs.")
|
217 |
+
|
218 |
+
multigraph = G.is_multigraph()
|
219 |
+
indegree_map = {v: d for v, d in G.in_degree() if d > 0}
|
220 |
+
zero_indegree = [v for v, d in G.in_degree() if d == 0]
|
221 |
+
|
222 |
+
while zero_indegree:
|
223 |
+
this_generation = zero_indegree
|
224 |
+
zero_indegree = []
|
225 |
+
for node in this_generation:
|
226 |
+
if node not in G:
|
227 |
+
raise RuntimeError("Graph changed during iteration")
|
228 |
+
for child in G.neighbors(node):
|
229 |
+
try:
|
230 |
+
indegree_map[child] -= len(G[node][child]) if multigraph else 1
|
231 |
+
except KeyError as err:
|
232 |
+
raise RuntimeError("Graph changed during iteration") from err
|
233 |
+
if indegree_map[child] == 0:
|
234 |
+
zero_indegree.append(child)
|
235 |
+
del indegree_map[child]
|
236 |
+
yield this_generation
|
237 |
+
|
238 |
+
if indegree_map:
|
239 |
+
raise nx.NetworkXUnfeasible(
|
240 |
+
"Graph contains a cycle or graph changed during iteration"
|
241 |
+
)
|
242 |
+
|
243 |
+
|
244 |
+
@nx._dispatchable
|
245 |
+
def topological_sort(G):
|
246 |
+
"""Returns a generator of nodes in topologically sorted order.
|
247 |
+
|
248 |
+
A topological sort is a nonunique permutation of the nodes of a
|
249 |
+
directed graph such that an edge from u to v implies that u
|
250 |
+
appears before v in the topological sort order. This ordering is
|
251 |
+
valid only if the graph has no directed cycles.
|
252 |
+
|
253 |
+
Parameters
|
254 |
+
----------
|
255 |
+
G : NetworkX digraph
|
256 |
+
A directed acyclic graph (DAG)
|
257 |
+
|
258 |
+
Yields
|
259 |
+
------
|
260 |
+
nodes
|
261 |
+
Yields the nodes in topological sorted order.
|
262 |
+
|
263 |
+
Raises
|
264 |
+
------
|
265 |
+
NetworkXError
|
266 |
+
Topological sort is defined for directed graphs only. If the graph `G`
|
267 |
+
is undirected, a :exc:`NetworkXError` is raised.
|
268 |
+
|
269 |
+
NetworkXUnfeasible
|
270 |
+
If `G` is not a directed acyclic graph (DAG) no topological sort exists
|
271 |
+
and a :exc:`NetworkXUnfeasible` exception is raised. This can also be
|
272 |
+
raised if `G` is changed while the returned iterator is being processed
|
273 |
+
|
274 |
+
RuntimeError
|
275 |
+
If `G` is changed while the returned iterator is being processed.
|
276 |
+
|
277 |
+
Examples
|
278 |
+
--------
|
279 |
+
To get the reverse order of the topological sort:
|
280 |
+
|
281 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3)])
|
282 |
+
>>> list(reversed(list(nx.topological_sort(DG))))
|
283 |
+
[3, 2, 1]
|
284 |
+
|
285 |
+
If your DiGraph naturally has the edges representing tasks/inputs
|
286 |
+
and nodes representing people/processes that initiate tasks, then
|
287 |
+
topological_sort is not quite what you need. You will have to change
|
288 |
+
the tasks to nodes with dependence reflected by edges. The result is
|
289 |
+
a kind of topological sort of the edges. This can be done
|
290 |
+
with :func:`networkx.line_graph` as follows:
|
291 |
+
|
292 |
+
>>> list(nx.topological_sort(nx.line_graph(DG)))
|
293 |
+
[(1, 2), (2, 3)]
|
294 |
+
|
295 |
+
Notes
|
296 |
+
-----
|
297 |
+
This algorithm is based on a description and proof in
|
298 |
+
"Introduction to Algorithms: A Creative Approach" [1]_ .
|
299 |
+
|
300 |
+
See also
|
301 |
+
--------
|
302 |
+
is_directed_acyclic_graph, lexicographical_topological_sort
|
303 |
+
|
304 |
+
References
|
305 |
+
----------
|
306 |
+
.. [1] Manber, U. (1989).
|
307 |
+
*Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
|
308 |
+
"""
    for generation in nx.topological_generations(G):
        yield from generation

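A hedged usage sketch (not part of the diffed file): topological order is what dependency resolution needs, since each item appears only after everything it depends on. The task names here are made up for illustration.

>>> import networkx as nx
>>> deps = nx.DiGraph([("libc", "compiler"), ("compiler", "app"), ("libc", "app")])
>>> list(nx.topological_sort(deps))
['libc', 'compiler', 'app']
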
@nx._dispatchable
|
314 |
+
def lexicographical_topological_sort(G, key=None):
|
315 |
+
"""Generate the nodes in the unique lexicographical topological sort order.
|
316 |
+
|
317 |
+
Generates a unique ordering of nodes by first sorting topologically (for which there are often
|
318 |
+
multiple valid orderings) and then additionally by sorting lexicographically.
|
319 |
+
|
320 |
+
A topological sort arranges the nodes of a directed graph so that the
|
321 |
+
upstream node of each directed edge precedes the downstream node.
|
322 |
+
It is always possible to find a solution for directed graphs that have no cycles.
|
323 |
+
There may be more than one valid solution.
|
324 |
+
|
325 |
+
Lexicographical sorting is just sorting alphabetically. It is used here to break ties in the
|
326 |
+
topological sort and to determine a single, unique ordering. This can be useful in comparing
|
327 |
+
sort results.
|
328 |
+
|
329 |
+
The lexicographical order can be customized by providing a function to the `key=` parameter.
|
330 |
+
The definition of the key function is the same as used in python's built-in `sort()`.
|
331 |
+
The function takes a single argument and returns a key to use for sorting purposes.
|
332 |
+
|
333 |
+
Lexicographical sorting can fail if the node names are un-sortable. See the example below.
|
334 |
+
The solution is to provide a function to the `key=` argument that returns sortable keys.
|
335 |
+
|
336 |
+
|
337 |
+
Parameters
|
338 |
+
----------
|
339 |
+
G : NetworkX digraph
|
340 |
+
A directed acyclic graph (DAG)
|
341 |
+
|
342 |
+
key : function, optional
|
343 |
+
A function of one argument that converts a node name to a comparison key.
|
344 |
+
It defines and resolves ambiguities in the sort order. Defaults to the identity function.
|
345 |
+
|
346 |
+
Yields
|
347 |
+
------
|
348 |
+
nodes
|
349 |
+
Yields the nodes of G in lexicographical topological sort order.
|
350 |
+
|
351 |
+
Raises
|
352 |
+
------
|
353 |
+
NetworkXError
|
354 |
+
Topological sort is defined for directed graphs only. If the graph `G`
|
355 |
+
is undirected, a :exc:`NetworkXError` is raised.
|
356 |
+
|
357 |
+
NetworkXUnfeasible
|
358 |
+
If `G` is not a directed acyclic graph (DAG) no topological sort exists
|
359 |
+
and a :exc:`NetworkXUnfeasible` exception is raised. This can also be
|
360 |
+
raised if `G` is changed while the returned iterator is being processed
|
361 |
+
|
362 |
+
RuntimeError
|
363 |
+
If `G` is changed while the returned iterator is being processed.
|
364 |
+
|
365 |
+
TypeError
|
366 |
+
Results from un-sortable node names.
|
367 |
+
Consider using `key=` parameter to resolve ambiguities in the sort order.
|
368 |
+
|
369 |
+
Examples
|
370 |
+
--------
|
371 |
+
>>> DG = nx.DiGraph([(2, 1), (2, 5), (1, 3), (1, 4), (5, 4)])
|
372 |
+
>>> list(nx.lexicographical_topological_sort(DG))
|
373 |
+
[2, 1, 3, 5, 4]
|
374 |
+
>>> list(nx.lexicographical_topological_sort(DG, key=lambda x: -x))
|
375 |
+
[2, 5, 1, 4, 3]
|
376 |
+
|
377 |
+
The sort will fail for any graph with integer and string nodes. Comparison of integer to strings
|
378 |
+
is not defined in python. Is 3 greater or less than 'red'?
|
379 |
+
|
380 |
+
>>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")])
|
381 |
+
>>> list(nx.lexicographical_topological_sort(DG))
|
382 |
+
Traceback (most recent call last):
|
383 |
+
...
|
384 |
+
TypeError: '<' not supported between instances of 'str' and 'int'
|
385 |
+
...
|
386 |
+
|
387 |
+
Incomparable nodes can be resolved using a `key` function. This example function
|
388 |
+
allows comparison of integers and strings by returning a tuple where the first
|
389 |
+
element is True for `str`, False otherwise. The second element is the node name.
|
390 |
+
This groups the strings and integers separately so they can be compared only among themselves.
|
391 |
+
|
392 |
+
>>> key = lambda node: (isinstance(node, str), node)
|
393 |
+
>>> list(nx.lexicographical_topological_sort(DG, key=key))
|
394 |
+
[1, 2, 3, 'blue', 'green', 'red']
|
395 |
+
|
396 |
+
Notes
|
397 |
+
-----
|
398 |
+
This algorithm is based on a description and proof in
|
399 |
+
"Introduction to Algorithms: A Creative Approach" [1]_ .
|
400 |
+
|
401 |
+
See also
|
402 |
+
--------
|
403 |
+
topological_sort
|
404 |
+
|
405 |
+
References
|
406 |
+
----------
|
407 |
+
.. [1] Manber, U. (1989).
|
408 |
+
*Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
|
409 |
+
"""
    if not G.is_directed():
        msg = "Topological sort not defined on undirected graphs."
        raise nx.NetworkXError(msg)

    if key is None:

        def key(node):
            return node

    nodeid_map = {n: i for i, n in enumerate(G)}

    def create_tuple(node):
        return key(node), nodeid_map[node], node

    indegree_map = {v: d for v, d in G.in_degree() if d > 0}
    # These nodes have zero indegree and ready to be returned.
    zero_indegree = [create_tuple(v) for v, d in G.in_degree() if d == 0]
    heapq.heapify(zero_indegree)

    while zero_indegree:
        _, _, node = heapq.heappop(zero_indegree)

        if node not in G:
            raise RuntimeError("Graph changed during iteration")
        for _, child in G.edges(node):
            try:
                indegree_map[child] -= 1
            except KeyError as err:
                raise RuntimeError("Graph changed during iteration") from err
            if indegree_map[child] == 0:
                try:
                    heapq.heappush(zero_indegree, create_tuple(child))
                except TypeError as err:
                    raise TypeError(
                        f"{err}\nConsider using `key=` parameter to resolve ambiguities in the sort order."
                    )
                del indegree_map[child]

        yield node

    if indegree_map:
        msg = "Graph contains a cycle or graph changed during iteration"
        raise nx.NetworkXUnfeasible(msg)

@not_implemented_for("undirected")
|
456 |
+
@nx._dispatchable
|
457 |
+
def all_topological_sorts(G):
|
458 |
+
"""Returns a generator of _all_ topological sorts of the directed graph G.
|
459 |
+
|
460 |
+
A topological sort is a nonunique permutation of the nodes such that an
|
461 |
+
edge from u to v implies that u appears before v in the topological sort
|
462 |
+
order.
|
463 |
+
|
464 |
+
Parameters
|
465 |
+
----------
|
466 |
+
G : NetworkX DiGraph
|
467 |
+
A directed graph
|
468 |
+
|
469 |
+
Yields
|
470 |
+
------
|
471 |
+
topological_sort_order : list
|
472 |
+
a list of nodes in `G`, representing one of the topological sort orders
|
473 |
+
|
474 |
+
Raises
|
475 |
+
------
|
476 |
+
NetworkXNotImplemented
|
477 |
+
If `G` is not directed
|
478 |
+
NetworkXUnfeasible
|
479 |
+
If `G` is not acyclic
|
480 |
+
|
481 |
+
Examples
|
482 |
+
--------
|
483 |
+
To enumerate all topological sorts of directed graph:
|
484 |
+
|
485 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
|
486 |
+
>>> list(nx.all_topological_sorts(DG))
|
487 |
+
[[1, 2, 4, 3], [1, 2, 3, 4]]
|
488 |
+
|
489 |
+
Notes
|
490 |
+
-----
|
491 |
+
Implements an iterative version of the algorithm given in [1].
|
492 |
+
|
493 |
+
References
|
494 |
+
----------
|
495 |
+
.. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974).
|
496 |
+
"A Structured Program to Generate All Topological Sorting Arrangements"
|
497 |
+
Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157,
|
498 |
+
ISSN 0020-0190,
|
499 |
+
https://doi.org/10.1016/0020-0190(74)90001-5.
|
500 |
+
Elsevier (North-Holland), Amsterdam
|
501 |
+
"""
|
502 |
+
if not G.is_directed():
|
503 |
+
raise nx.NetworkXError("Topological sort not defined on undirected graphs.")
|
504 |
+
|
505 |
+
# the names of count and D are chosen to match the global variables in [1]
|
506 |
+
# number of edges originating in a vertex v
|
507 |
+
count = dict(G.in_degree())
|
508 |
+
# vertices with indegree 0
|
509 |
+
D = deque([v for v, d in G.in_degree() if d == 0])
|
510 |
+
# stack of first value chosen at a position k in the topological sort
|
511 |
+
bases = []
|
512 |
+
current_sort = []
|
513 |
+
|
514 |
+
# do-while construct
|
515 |
+
while True:
|
516 |
+
assert all(count[v] == 0 for v in D)
|
517 |
+
|
518 |
+
if len(current_sort) == len(G):
|
519 |
+
yield list(current_sort)
|
520 |
+
|
521 |
+
# clean-up stack
|
522 |
+
while len(current_sort) > 0:
|
523 |
+
assert len(bases) == len(current_sort)
|
524 |
+
q = current_sort.pop()
|
525 |
+
|
526 |
+
# "restores" all edges (q, x)
|
527 |
+
# NOTE: it is important to iterate over edges instead
|
528 |
+
# of successors, so count is updated correctly in multigraphs
|
529 |
+
for _, j in G.out_edges(q):
|
530 |
+
count[j] += 1
|
531 |
+
assert count[j] >= 0
|
532 |
+
# remove entries from D
|
533 |
+
while len(D) > 0 and count[D[-1]] > 0:
|
534 |
+
D.pop()
|
535 |
+
|
536 |
+
# corresponds to a circular shift of the values in D
|
537 |
+
# if the first value chosen (the base) is in the first
|
538 |
+
# position of D again, we are done and need to consider the
|
539 |
+
# previous condition
|
540 |
+
D.appendleft(q)
|
541 |
+
if D[-1] == bases[-1]:
|
542 |
+
# all possible values have been chosen at current position
|
543 |
+
# remove corresponding marker
|
544 |
+
bases.pop()
|
545 |
+
else:
|
546 |
+
# there are still elements that have not been fixed
|
547 |
+
# at the current position in the topological sort
|
548 |
+
# stop removing elements, escape inner loop
|
549 |
+
break
|
550 |
+
|
551 |
+
else:
|
552 |
+
if len(D) == 0:
|
553 |
+
raise nx.NetworkXUnfeasible("Graph contains a cycle.")
|
554 |
+
|
555 |
+
# choose next node
|
556 |
+
q = D.pop()
|
557 |
+
# "erase" all edges (q, x)
|
558 |
+
# NOTE: it is important to iterate over edges instead
|
559 |
+
# of successors, so count is updated correctly in multigraphs
|
560 |
+
for _, j in G.out_edges(q):
|
561 |
+
count[j] -= 1
|
562 |
+
assert count[j] >= 0
|
563 |
+
if count[j] == 0:
|
564 |
+
D.append(j)
|
565 |
+
current_sort.append(q)
|
566 |
+
|
567 |
+
# base for current position might _not_ be fixed yet
|
568 |
+
if len(bases) < len(current_sort):
|
569 |
+
bases.append(q)
|
570 |
+
|
571 |
+
if len(bases) == 0:
|
572 |
+
break
|
573 |
+
|
574 |
+
|
575 |
+
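Because `all_topological_sorts` returns a generator, the orderings can be consumed lazily; for example, the number of topological sorts (linear extensions) of a small DAG can be counted without storing them all. Illustrative sketch, not part of the diffed file:

>>> import networkx as nx
>>> DG = nx.DiGraph([(1, 2), (1, 3)])
>>> sum(1 for _ in nx.all_topological_sorts(DG))
2
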
@nx._dispatchable
|
576 |
+
def is_aperiodic(G):
|
577 |
+
"""Returns True if `G` is aperiodic.
|
578 |
+
|
579 |
+
A directed graph is aperiodic if there is no integer k > 1 that
|
580 |
+
divides the length of every cycle in the graph.
|
581 |
+
|
582 |
+
Parameters
|
583 |
+
----------
|
584 |
+
G : NetworkX DiGraph
|
585 |
+
A directed graph
|
586 |
+
|
587 |
+
Returns
|
588 |
+
-------
|
589 |
+
bool
|
590 |
+
True if the graph is aperiodic, False otherwise
|
591 |
+
|
592 |
+
Raises
|
593 |
+
------
|
594 |
+
NetworkXError
|
595 |
+
If `G` is not directed
|
596 |
+
|
597 |
+
Examples
|
598 |
+
--------
|
599 |
+
A graph consisting of one cycle, the length of which is 2. Therefore ``k = 2``
|
600 |
+
divides the length of every cycle in the graph and thus the graph
|
601 |
+
is *not aperiodic*::
|
602 |
+
|
603 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 1)])
|
604 |
+
>>> nx.is_aperiodic(DG)
|
605 |
+
False
|
606 |
+
|
607 |
+
A graph consisting of two cycles: one of length 2 and the other of length 3.
|
608 |
+
The cycle lengths are coprime, so there is no single value of k where ``k > 1``
|
609 |
+
that divides each cycle length and therefore the graph is *aperiodic*::
|
610 |
+
|
611 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1), (1, 4), (4, 1)])
|
612 |
+
>>> nx.is_aperiodic(DG)
|
613 |
+
True
|
614 |
+
|
615 |
+
A graph consisting of two cycles: one of length 2 and the other of length 4.
|
616 |
+
The lengths of the cycles share a common factor ``k = 2``, and therefore
|
617 |
+
the graph is *not aperiodic*::
|
618 |
+
|
619 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 1), (3, 4), (4, 5), (5, 6), (6, 3)])
|
620 |
+
>>> nx.is_aperiodic(DG)
|
621 |
+
False
|
622 |
+
|
623 |
+
An acyclic graph, therefore the graph is *not aperiodic*::
|
624 |
+
|
625 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3)])
|
626 |
+
>>> nx.is_aperiodic(DG)
|
627 |
+
False
|
628 |
+
|
629 |
+
Notes
|
630 |
+
-----
|
631 |
+
This uses the method outlined in [1]_, which runs in $O(m)$ time
|
632 |
+
given $m$ edges in `G`. Note that a graph is not aperiodic if it is
|
633 |
+
acyclic, as every integer trivially divides length 0 cycles.
|
634 |
+
|
635 |
+
References
|
636 |
+
----------
|
637 |
+
.. [1] Jarvis, J. P.; Shier, D. R. (1996),
|
638 |
+
"Graph-theoretic analysis of finite Markov chains,"
|
639 |
+
in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling:
|
640 |
+
A Multidisciplinary Approach, CRC Press.
|
641 |
+
"""
    if not G.is_directed():
        raise nx.NetworkXError("is_aperiodic not defined for undirected graphs")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    s = arbitrary_element(G)
    levels = {s: 0}
    this_level = [s]
    g = 0
    lev = 1
    while this_level:
        next_level = []
        for u in this_level:
            for v in G[u]:
                if v in levels:  # Non-Tree Edge
                    g = gcd(g, levels[u] - levels[v] + 1)
                else:  # Tree Edge
                    next_level.append(v)
                    levels[v] = lev
        this_level = next_level
        lev += 1
    if len(levels) == len(G):  # All nodes in tree
        return g == 1
    else:
        return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels)))

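A common use is checking whether a finite Markov chain's transition structure is aperiodic. A hedged sketch (not part of the diffed file) on a 3-state cycle, where adding a self-loop introduces a cycle of length 1 and makes the chain aperiodic:

>>> import networkx as nx
>>> chain = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
>>> nx.is_aperiodic(chain)
False
>>> chain.add_edge(0, 0)
>>> nx.is_aperiodic(chain)
True
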
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
669 |
+
def transitive_closure(G, reflexive=False):
|
670 |
+
"""Returns transitive closure of a graph
|
671 |
+
|
672 |
+
The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
|
673 |
+
for all v, w in V there is an edge (v, w) in E+ if and only if there
|
674 |
+
is a path from v to w in G.
|
675 |
+
|
676 |
+
Handling of paths from v to v has some flexibility within this definition.
|
677 |
+
A reflexive transitive closure creates a self-loop for the path
|
678 |
+
from v to v of length 0. The usual transitive closure creates a
|
679 |
+
self-loop only if a cycle exists (a path from v to v with length > 0).
|
680 |
+
We also allow an option for no self-loops.
|
681 |
+
|
682 |
+
Parameters
|
683 |
+
----------
|
684 |
+
G : NetworkX Graph
|
685 |
+
A directed/undirected graph/multigraph.
|
686 |
+
reflexive : Bool or None, optional (default: False)
|
687 |
+
Determines when cycles create self-loops in the Transitive Closure.
|
688 |
+
If True, trivial cycles (length 0) create self-loops. The result
|
689 |
+
is a reflexive transitive closure of G.
|
690 |
+
If False (the default) non-trivial cycles create self-loops.
|
691 |
+
If None, self-loops are not created.
|
692 |
+
|
693 |
+
Returns
|
694 |
+
-------
|
695 |
+
NetworkX graph
|
696 |
+
The transitive closure of `G`
|
697 |
+
|
698 |
+
Raises
|
699 |
+
------
|
700 |
+
NetworkXError
|
701 |
+
If `reflexive` not in `{None, True, False}`
|
702 |
+
|
703 |
+
Examples
|
704 |
+
--------
|
705 |
+
The treatment of trivial (i.e. length 0) cycles is controlled by the
|
706 |
+
`reflexive` parameter.
|
707 |
+
|
708 |
+
Trivial (i.e. length 0) cycles do not create self-loops when
|
709 |
+
``reflexive=False`` (the default)::
|
710 |
+
|
711 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3)])
|
712 |
+
>>> TC = nx.transitive_closure(DG, reflexive=False)
|
713 |
+
>>> TC.edges()
|
714 |
+
OutEdgeView([(1, 2), (1, 3), (2, 3)])
|
715 |
+
|
716 |
+
However, nontrivial (i.e. length greater than 0) cycles create self-loops
|
717 |
+
when ``reflexive=False`` (the default)::
|
718 |
+
|
719 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
|
720 |
+
>>> TC = nx.transitive_closure(DG, reflexive=False)
|
721 |
+
>>> TC.edges()
|
722 |
+
OutEdgeView([(1, 2), (1, 3), (1, 1), (2, 3), (2, 1), (2, 2), (3, 1), (3, 2), (3, 3)])
|
723 |
+
|
724 |
+
Trivial cycles (length 0) create self-loops when ``reflexive=True``::
|
725 |
+
|
726 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3)])
|
727 |
+
>>> TC = nx.transitive_closure(DG, reflexive=True)
|
728 |
+
>>> TC.edges()
|
729 |
+
OutEdgeView([(1, 2), (1, 1), (1, 3), (2, 3), (2, 2), (3, 3)])
|
730 |
+
|
731 |
+
And the third option is not to create self-loops at all when ``reflexive=None``::
|
732 |
+
|
733 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
|
734 |
+
>>> TC = nx.transitive_closure(DG, reflexive=None)
|
735 |
+
>>> TC.edges()
|
736 |
+
OutEdgeView([(1, 2), (1, 3), (2, 3), (2, 1), (3, 1), (3, 2)])
|
737 |
+
|
738 |
+
References
|
739 |
+
----------
|
740 |
+
.. [1] https://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py
|
741 |
+
"""
    TC = G.copy()

    if reflexive not in {None, True, False}:
        raise nx.NetworkXError("Incorrect value for the parameter `reflexive`")

    for v in G:
        if reflexive is None:
            TC.add_edges_from((v, u) for u in nx.descendants(G, v) if u not in TC[v])
        elif reflexive is True:
            TC.add_edges_from(
                (v, u) for u in nx.descendants(G, v) | {v} if u not in TC[v]
            )
        elif reflexive is False:
            TC.add_edges_from((v, e[1]) for e in nx.edge_bfs(G, v) if e[1] not in TC[v])

    return TC

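The closure turns reachability questions into edge lookups, which can pay off when many queries are made against a fixed graph. A minimal sketch, not part of the diffed file:

>>> import networkx as nx
>>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
>>> TC = nx.transitive_closure(DG)
>>> TC.has_edge(1, 4), TC.has_edge(4, 1)
(True, False)
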
@not_implemented_for("undirected")
|
761 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
762 |
+
def transitive_closure_dag(G, topo_order=None):
|
763 |
+
"""Returns the transitive closure of a directed acyclic graph.
|
764 |
+
|
765 |
+
This function is faster than the function `transitive_closure`, but fails
|
766 |
+
if the graph has a cycle.
|
767 |
+
|
768 |
+
The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
|
769 |
+
for all v, w in V there is an edge (v, w) in E+ if and only if there
|
770 |
+
is a non-null path from v to w in G.
|
771 |
+
|
772 |
+
Parameters
|
773 |
+
----------
|
774 |
+
G : NetworkX DiGraph
|
775 |
+
A directed acyclic graph (DAG)
|
776 |
+
|
777 |
+
topo_order: list or tuple, optional
|
778 |
+
A topological order for G (if None, the function will compute one)
|
779 |
+
|
780 |
+
Returns
|
781 |
+
-------
|
782 |
+
NetworkX DiGraph
|
783 |
+
The transitive closure of `G`
|
784 |
+
|
785 |
+
Raises
|
786 |
+
------
|
787 |
+
NetworkXNotImplemented
|
788 |
+
If `G` is not directed
|
789 |
+
NetworkXUnfeasible
|
790 |
+
If `G` has a cycle
|
791 |
+
|
792 |
+
Examples
|
793 |
+
--------
|
794 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3)])
|
795 |
+
>>> TC = nx.transitive_closure_dag(DG)
|
796 |
+
>>> TC.edges()
|
797 |
+
OutEdgeView([(1, 2), (1, 3), (2, 3)])
|
798 |
+
|
799 |
+
Notes
|
800 |
+
-----
|
801 |
+
This algorithm is probably simple enough to be well-known but I didn't find
|
802 |
+
a mention in the literature.
|
803 |
+
"""
    if topo_order is None:
        topo_order = list(topological_sort(G))

    TC = G.copy()

    # idea: traverse vertices following a reverse topological order, connecting
    # each vertex to its descendants at distance 2 as we go
    for v in reversed(topo_order):
        TC.add_edges_from((v, u) for u in nx.descendants_at_distance(TC, v, 2))

    return TC

@not_implemented_for("undirected")
|
818 |
+
@nx._dispatchable(returns_graph=True)
|
819 |
+
def transitive_reduction(G):
|
820 |
+
"""Returns transitive reduction of a directed graph
|
821 |
+
|
822 |
+
The transitive reduction of G = (V,E) is a graph G- = (V,E-) such that
|
823 |
+
for all v,w in V there is an edge (v,w) in E- if and only if (v,w) is
|
824 |
+
in E and there is no path from v to w in G with length greater than 1.
|
825 |
+
|
826 |
+
Parameters
|
827 |
+
----------
|
828 |
+
G : NetworkX DiGraph
|
829 |
+
A directed acyclic graph (DAG)
|
830 |
+
|
831 |
+
Returns
|
832 |
+
-------
|
833 |
+
NetworkX DiGraph
|
834 |
+
The transitive reduction of `G`
|
835 |
+
|
836 |
+
Raises
|
837 |
+
------
|
838 |
+
NetworkXError
|
839 |
+
If `G` is not a directed acyclic graph (DAG) transitive reduction is
|
840 |
+
not uniquely defined and a :exc:`NetworkXError` exception is raised.
|
841 |
+
|
842 |
+
Examples
|
843 |
+
--------
|
844 |
+
To perform transitive reduction on a DiGraph:
|
845 |
+
|
846 |
+
>>> DG = nx.DiGraph([(1, 2), (2, 3), (1, 3)])
|
847 |
+
>>> TR = nx.transitive_reduction(DG)
|
848 |
+
>>> list(TR.edges)
|
849 |
+
[(1, 2), (2, 3)]
|
850 |
+
|
851 |
+
To avoid unnecessary data copies, this implementation does not return a
|
852 |
+
DiGraph with node/edge data.
|
853 |
+
To perform transitive reduction on a DiGraph and transfer node/edge data:
|
854 |
+
|
855 |
+
>>> DG = nx.DiGraph()
|
856 |
+
>>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color="red")
|
857 |
+
>>> TR = nx.transitive_reduction(DG)
|
858 |
+
>>> TR.add_nodes_from(DG.nodes(data=True))
|
859 |
+
>>> TR.add_edges_from((u, v, DG.edges[u, v]) for u, v in TR.edges)
|
860 |
+
>>> list(TR.edges(data=True))
|
861 |
+
[(1, 2, {'color': 'red'}), (2, 3, {'color': 'red'})]
|
862 |
+
|
863 |
+
References
|
864 |
+
----------
|
865 |
+
https://en.wikipedia.org/wiki/Transitive_reduction
|
866 |
+
|
867 |
+
"""
    if not is_directed_acyclic_graph(G):
        msg = "Directed Acyclic Graph required for transitive_reduction"
        raise nx.NetworkXError(msg)
    TR = nx.DiGraph()
    TR.add_nodes_from(G.nodes())
    descendants = {}
    # count before removing set stored in descendants
    check_count = dict(G.in_degree)
    for u in G:
        u_nbrs = set(G[u])
        for v in G[u]:
            if v in u_nbrs:
                if v not in descendants:
                    descendants[v] = {y for x, y in nx.dfs_edges(G, v)}
                u_nbrs -= descendants[v]
            check_count[v] -= 1
            if check_count[v] == 0:
                del descendants[v]
        TR.add_edges_from((u, v) for v in u_nbrs)
    return TR

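One way to see the effect of the reduction is to compare edge counts: "shortcut" edges implied by longer paths are removed while reachability is preserved. Illustrative sketch, not part of the diffed file:

>>> import networkx as nx
>>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (1, 3), (1, 4), (2, 4)])
>>> TR = nx.transitive_reduction(DG)
>>> DG.number_of_edges(), TR.number_of_edges()
(6, 3)
>>> sorted(TR.edges)
[(1, 2), (2, 3), (3, 4)]
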
@not_implemented_for("undirected")
|
891 |
+
@nx._dispatchable
|
892 |
+
def antichains(G, topo_order=None):
|
893 |
+
"""Generates antichains from a directed acyclic graph (DAG).
|
894 |
+
|
895 |
+
An antichain is a subset of a partially ordered set such that any
|
896 |
+
two elements in the subset are incomparable.
|
897 |
+
|
898 |
+
Parameters
|
899 |
+
----------
|
900 |
+
G : NetworkX DiGraph
|
901 |
+
A directed acyclic graph (DAG)
|
902 |
+
|
903 |
+
topo_order: list or tuple, optional
|
904 |
+
A topological order for G (if None, the function will compute one)
|
905 |
+
|
906 |
+
Yields
|
907 |
+
------
|
908 |
+
antichain : list
|
909 |
+
a list of nodes in `G` representing an antichain
|
910 |
+
|
911 |
+
Raises
|
912 |
+
------
|
913 |
+
NetworkXNotImplemented
|
914 |
+
If `G` is not directed
|
915 |
+
|
916 |
+
NetworkXUnfeasible
|
917 |
+
If `G` contains a cycle
|
918 |
+
|
919 |
+
Examples
|
920 |
+
--------
|
921 |
+
>>> DG = nx.DiGraph([(1, 2), (1, 3)])
|
922 |
+
>>> list(nx.antichains(DG))
|
923 |
+
[[], [3], [2], [2, 3], [1]]
|
924 |
+
|
925 |
+
Notes
|
926 |
+
-----
|
927 |
+
This function was originally developed by Peter Jipsen and Franco Saliola
|
928 |
+
for the SAGE project. It's included in NetworkX with permission from the
|
929 |
+
authors. Original SAGE code at:
|
930 |
+
|
931 |
+
https://github.com/sagemath/sage/blob/master/src/sage/combinat/posets/hasse_diagram.py
|
932 |
+
|
933 |
+
References
|
934 |
+
----------
|
935 |
+
.. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation,
|
936 |
+
AMS, Vol 42, 1995, p. 226.
|
937 |
+
"""
    if topo_order is None:
        topo_order = list(nx.topological_sort(G))

    TC = nx.transitive_closure_dag(G, topo_order)
    antichains_stacks = [([], list(reversed(topo_order)))]

    while antichains_stacks:
        (antichain, stack) = antichains_stacks.pop()
        # Invariant:
        #  - the elements of antichain are independent
        #  - the elements of stack are independent from those of antichain
        yield antichain
        while stack:
            x = stack.pop()
            new_antichain = antichain + [x]
            new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))]
            antichains_stacks.append((new_antichain, new_stack))

@not_implemented_for("undirected")
|
958 |
+
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
|
959 |
+
def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None):
|
960 |
+
"""Returns the longest path in a directed acyclic graph (DAG).
|
961 |
+
|
962 |
+
If `G` has edges with `weight` attribute the edge data are used as
|
963 |
+
weight values.
|
964 |
+
|
965 |
+
Parameters
|
966 |
+
----------
|
967 |
+
G : NetworkX DiGraph
|
968 |
+
A directed acyclic graph (DAG)
|
969 |
+
|
970 |
+
weight : str, optional
|
971 |
+
Edge data key to use for weight
|
972 |
+
|
973 |
+
default_weight : int, optional
|
974 |
+
The weight of edges that do not have a weight attribute
|
975 |
+
|
976 |
+
topo_order: list or tuple, optional
|
977 |
+
A topological order for `G` (if None, the function will compute one)
|
978 |
+
|
979 |
+
Returns
|
980 |
+
-------
|
981 |
+
list
|
982 |
+
Longest path
|
983 |
+
|
984 |
+
Raises
|
985 |
+
------
|
986 |
+
NetworkXNotImplemented
|
987 |
+
If `G` is not directed
|
988 |
+
|
989 |
+
Examples
|
990 |
+
--------
|
991 |
+
>>> DG = nx.DiGraph([(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})])
|
992 |
+
>>> list(nx.all_simple_paths(DG, 0, 2))
|
993 |
+
[[0, 1, 2], [0, 2]]
|
994 |
+
>>> nx.dag_longest_path(DG)
|
995 |
+
[0, 1, 2]
|
996 |
+
>>> nx.dag_longest_path(DG, weight="cost")
|
997 |
+
[0, 2]
|
998 |
+
|
999 |
+
In the case where multiple valid topological orderings exist, `topo_order`
|
1000 |
+
can be used to specify a specific ordering:
|
1001 |
+
|
1002 |
+
>>> DG = nx.DiGraph([(0, 1), (0, 2)])
|
1003 |
+
>>> sorted(nx.all_topological_sorts(DG)) # Valid topological orderings
|
1004 |
+
[[0, 1, 2], [0, 2, 1]]
|
1005 |
+
>>> nx.dag_longest_path(DG, topo_order=[0, 1, 2])
|
1006 |
+
[0, 1]
|
1007 |
+
>>> nx.dag_longest_path(DG, topo_order=[0, 2, 1])
|
1008 |
+
[0, 2]
|
1009 |
+
|
1010 |
+
See also
|
1011 |
+
--------
|
1012 |
+
dag_longest_path_length
|
1013 |
+
|
1014 |
+
"""
    if not G:
        return []

    if topo_order is None:
        topo_order = nx.topological_sort(G)

    dist = {}  # stores {v : (length, u)}
    for v in topo_order:
        us = [
            (
                dist[u][0]
                + (
                    max(data.values(), key=lambda x: x.get(weight, default_weight))
                    if G.is_multigraph()
                    else data
                ).get(weight, default_weight),
                u,
            )
            for u, data in G.pred[v].items()
        ]

        # Use the best predecessor if there is one and its distance is
        # non-negative, otherwise terminate.
        maxu = max(us, key=lambda x: x[0]) if us else (0, v)
        dist[v] = maxu if maxu[0] >= 0 else (0, v)

    u = None
    v = max(dist, key=lambda x: dist[x][0])
    path = []
    while u != v:
        path.append(v)
        u = v
        v = dist[v][1]

    path.reverse()
    return path

@not_implemented_for("undirected")
|
1054 |
+
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
|
1055 |
+
def dag_longest_path_length(G, weight="weight", default_weight=1):
|
1056 |
+
"""Returns the longest path length in a DAG
|
1057 |
+
|
1058 |
+
Parameters
|
1059 |
+
----------
|
1060 |
+
G : NetworkX DiGraph
|
1061 |
+
A directed acyclic graph (DAG)
|
1062 |
+
|
1063 |
+
weight : string, optional
|
1064 |
+
Edge data key to use for weight
|
1065 |
+
|
1066 |
+
default_weight : int, optional
|
1067 |
+
The weight of edges that do not have a weight attribute
|
1068 |
+
|
1069 |
+
Returns
|
1070 |
+
-------
|
1071 |
+
int
|
1072 |
+
Longest path length
|
1073 |
+
|
1074 |
+
Raises
|
1075 |
+
------
|
1076 |
+
NetworkXNotImplemented
|
1077 |
+
If `G` is not directed
|
1078 |
+
|
1079 |
+
Examples
|
1080 |
+
--------
|
1081 |
+
>>> DG = nx.DiGraph([(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})])
|
1082 |
+
>>> list(nx.all_simple_paths(DG, 0, 2))
|
1083 |
+
[[0, 1, 2], [0, 2]]
|
1084 |
+
>>> nx.dag_longest_path_length(DG)
|
1085 |
+
2
|
1086 |
+
>>> nx.dag_longest_path_length(DG, weight="cost")
|
1087 |
+
42
|
1088 |
+
|
1089 |
+
See also
|
1090 |
+
--------
|
1091 |
+
dag_longest_path
|
1092 |
+
"""
    path = nx.dag_longest_path(G, weight, default_weight)
    path_length = 0
    if G.is_multigraph():
        for u, v in pairwise(path):
            i = max(G[u][v], key=lambda x: G[u][v][x].get(weight, default_weight))
            path_length += G[u][v][i].get(weight, default_weight)
    else:
        for u, v in pairwise(path):
            path_length += G[u][v].get(weight, default_weight)

    return path_length

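A typical application of these two functions is critical-path analysis of a task graph, where edge weights are durations and the longest weighted path bounds the schedule. A hedged sketch with made-up durations, not part of the diffed file:

>>> import networkx as nx
>>> tasks = nx.DiGraph()
>>> tasks.add_weighted_edges_from([("a", "b", 3), ("b", "d", 2), ("a", "c", 1), ("c", "d", 1)])
>>> nx.dag_longest_path(tasks)
['a', 'b', 'd']
>>> nx.dag_longest_path_length(tasks)
5
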
@nx._dispatchable
def root_to_leaf_paths(G):
    """Yields root-to-leaf paths in a directed acyclic graph.

    `G` must be a directed acyclic graph. If not, the behavior of this
    function is undefined. A "root" in this graph is a node of in-degree
    zero and a "leaf" a node of out-degree zero.

    When invoked, this function iterates over each path from any root to
    any leaf. A path is a list of nodes.

    """
    roots = (v for v, d in G.in_degree() if d == 0)
    leaves = (v for v, d in G.out_degree() if d == 0)
    all_paths = partial(nx.all_simple_paths, G)
    # TODO In Python 3, this would be better as `yield from ...`.
    return chaini(starmap(all_paths, product(roots, leaves)))

@not_implemented_for("multigraph")
|
1126 |
+
@not_implemented_for("undirected")
|
1127 |
+
@nx._dispatchable(returns_graph=True)
|
1128 |
+
def dag_to_branching(G):
|
1129 |
+
"""Returns a branching representing all (overlapping) paths from
|
1130 |
+
root nodes to leaf nodes in the given directed acyclic graph.
|
1131 |
+
|
1132 |
+
As described in :mod:`networkx.algorithms.tree.recognition`, a
|
1133 |
+
*branching* is a directed forest in which each node has at most one
|
1134 |
+
parent. In other words, a branching is a disjoint union of
|
1135 |
+
*arborescences*. For this function, each node of in-degree zero in
|
1136 |
+
`G` becomes a root of one of the arborescences, and there will be
|
1137 |
+
one leaf node for each distinct path from that root to a leaf node
|
1138 |
+
in `G`.
|
1139 |
+
|
1140 |
+
Each node `v` in `G` with *k* parents becomes *k* distinct nodes in
|
1141 |
+
the returned branching, one for each parent, and the sub-DAG rooted
|
1142 |
+
at `v` is duplicated for each copy. The algorithm then recurses on
|
1143 |
+
the children of each copy of `v`.
|
1144 |
+
|
1145 |
+
Parameters
|
1146 |
+
----------
|
1147 |
+
G : NetworkX graph
|
1148 |
+
A directed acyclic graph.
|
1149 |
+
|
1150 |
+
Returns
|
1151 |
+
-------
|
1152 |
+
DiGraph
|
1153 |
+
The branching in which there is a bijection between root-to-leaf
|
1154 |
+
paths in `G` (in which multiple paths may share the same leaf)
|
1155 |
+
and root-to-leaf paths in the branching (in which there is a
|
1156 |
+
unique path from a root to a leaf).
|
1157 |
+
|
1158 |
+
Each node has an attribute 'source' whose value is the original
|
1159 |
+
node to which this node corresponds. No other graph, node, or
|
1160 |
+
edge attributes are copied into this new graph.
|
1161 |
+
|
1162 |
+
Raises
|
1163 |
+
------
|
1164 |
+
NetworkXNotImplemented
|
1165 |
+
If `G` is not directed, or if `G` is a multigraph.
|
1166 |
+
|
1167 |
+
HasACycle
|
1168 |
+
If `G` is not acyclic.
|
1169 |
+
|
1170 |
+
Examples
|
1171 |
+
--------
|
1172 |
+
To examine which nodes in the returned branching were produced by
|
1173 |
+
which original node in the directed acyclic graph, we can collect
|
1174 |
+
the mapping from source node to new nodes into a dictionary. For
|
1175 |
+
example, consider the directed diamond graph::
|
1176 |
+
|
1177 |
+
>>> from collections import defaultdict
|
1178 |
+
>>> from operator import itemgetter
|
1179 |
+
>>>
|
1180 |
+
>>> G = nx.DiGraph(nx.utils.pairwise("abd"))
|
1181 |
+
>>> G.add_edges_from(nx.utils.pairwise("acd"))
|
1182 |
+
>>> B = nx.dag_to_branching(G)
|
1183 |
+
>>>
|
1184 |
+
>>> sources = defaultdict(set)
|
1185 |
+
>>> for v, source in B.nodes(data="source"):
|
1186 |
+
... sources[source].add(v)
|
1187 |
+
>>> len(sources["a"])
|
1188 |
+
1
|
1189 |
+
>>> len(sources["d"])
|
1190 |
+
2
|
1191 |
+
|
1192 |
+
To copy node attributes from the original graph to the new graph,
|
1193 |
+
you can use a dictionary like the one constructed in the above
|
1194 |
+
example::
|
1195 |
+
|
1196 |
+
>>> for source, nodes in sources.items():
|
1197 |
+
... for v in nodes:
|
1198 |
+
... B.nodes[v].update(G.nodes[source])
|
1199 |
+
|
1200 |
+
Notes
|
1201 |
+
-----
|
1202 |
+
This function is not idempotent in the sense that the node labels in
|
1203 |
+
the returned branching may be uniquely generated each time the
|
1204 |
+
function is invoked. In fact, the node labels may not be integers;
|
1205 |
+
in order to relabel the nodes to be more readable, you can use the
|
1206 |
+
:func:`networkx.convert_node_labels_to_integers` function.
|
1207 |
+
|
1208 |
+
The current implementation of this function uses
|
1209 |
+
:func:`networkx.prefix_tree`, so it is subject to the limitations of
|
1210 |
+
that function.
|
1211 |
+
|
1212 |
+
"""
    if has_cycle(G):
        msg = "dag_to_branching is only defined for acyclic graphs"
        raise nx.HasACycle(msg)
    paths = root_to_leaf_paths(G)
    B = nx.prefix_tree(paths)
    # Remove the synthetic `root`(0) and `NIL`(-1) nodes from the tree
    B.remove_node(0)
    B.remove_node(-1)
    return B

@not_implemented_for("undirected")
|
1225 |
+
@nx._dispatchable
|
1226 |
+
def compute_v_structures(G):
|
1227 |
+
"""Iterate through the graph to compute all v-structures.
|
1228 |
+
|
1229 |
+
V-structures are triples in the directed graph where
|
1230 |
+
two parent nodes point to the same child and the two parent nodes
|
1231 |
+
are not adjacent.
|
1232 |
+
|
1233 |
+
Parameters
|
1234 |
+
----------
|
1235 |
+
G : graph
|
1236 |
+
A networkx DiGraph.
|
1237 |
+
|
1238 |
+
Returns
|
1239 |
+
-------
|
1240 |
+
vstructs : iterator of tuples
|
1241 |
+
The v structures within the graph. Each v structure is a 3-tuple with the
|
1242 |
+
parent, collider, and other parent.
|
1243 |
+
|
1244 |
+
Examples
|
1245 |
+
--------
|
1246 |
+
>>> G = nx.DiGraph()
|
1247 |
+
>>> G.add_edges_from([(1, 2), (0, 5), (3, 1), (2, 4), (3, 1), (4, 5), (1, 5)])
|
1248 |
+
>>> sorted(nx.compute_v_structures(G))
|
1249 |
+
[(0, 5, 1), (0, 5, 4), (1, 5, 4)]
|
1250 |
+
|
1251 |
+
Notes
|
1252 |
+
-----
|
1253 |
+
`Wikipedia: Collider in causal graphs <https://en.wikipedia.org/wiki/Collider_(statistics)>`_
|
1254 |
+
"""
    for collider, preds in G.pred.items():
        for common_parents in combinations(preds, r=2):
            # ensure that the colliders are the same
            common_parents = sorted(common_parents)
            yield (common_parents[0], collider, common_parents[1])
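In causal-discovery settings, v-structures (colliders) are the edge orientations identifiable from data, so enumerating them is a common first step. A small hedged sketch with made-up variable names, not part of the diffed file:

>>> import networkx as nx
>>> G = nx.DiGraph([("treatment", "outcome"), ("confounder", "outcome")])
>>> list(nx.compute_v_structures(G))
[('confounder', 'outcome', 'treatment')]
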
venv/lib/python3.10/site-packages/networkx/algorithms/dominance.py
ADDED
@@ -0,0 +1,135 @@
1 |
+
"""
|
2 |
+
Dominance algorithms.
|
3 |
+
"""
|
4 |
+
|
5 |
+
from functools import reduce
|
6 |
+
|
7 |
+
import networkx as nx
|
8 |
+
from networkx.utils import not_implemented_for
|
9 |
+
|
10 |
+
__all__ = ["immediate_dominators", "dominance_frontiers"]
|
11 |
+
|
12 |
+
|
13 |
+
@not_implemented_for("undirected")
|
14 |
+
@nx._dispatchable
|
15 |
+
def immediate_dominators(G, start):
|
16 |
+
"""Returns the immediate dominators of all nodes of a directed graph.
|
17 |
+
|
18 |
+
Parameters
|
19 |
+
----------
|
20 |
+
G : a DiGraph or MultiDiGraph
|
21 |
+
The graph where dominance is to be computed.
|
22 |
+
|
23 |
+
start : node
|
24 |
+
The start node of dominance computation.
|
25 |
+
|
26 |
+
Returns
|
27 |
+
-------
|
28 |
+
idom : dict keyed by nodes
|
29 |
+
A dict containing the immediate dominators of each node reachable from
|
30 |
+
`start`.
|
31 |
+
|
32 |
+
Raises
|
33 |
+
------
|
34 |
+
NetworkXNotImplemented
|
35 |
+
If `G` is undirected.
|
36 |
+
|
37 |
+
NetworkXError
|
38 |
+
If `start` is not in `G`.
|
39 |
+
|
40 |
+
Notes
|
41 |
+
-----
|
42 |
+
Except for `start`, the immediate dominators are the parents of their
|
43 |
+
corresponding nodes in the dominator tree.
|
44 |
+
|
45 |
+
Examples
|
46 |
+
--------
|
47 |
+
>>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
|
48 |
+
>>> sorted(nx.immediate_dominators(G, 1).items())
|
49 |
+
[(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)]
|
50 |
+
|
51 |
+
References
|
52 |
+
----------
|
53 |
+
.. [1] K. D. Cooper, T. J. Harvey, and K. Kennedy.
|
54 |
+
A simple, fast dominance algorithm.
|
55 |
+
Software Practice & Experience, 4:110, 2001.
|
56 |
+
"""
    if start not in G:
        raise nx.NetworkXError("start is not in G")

    idom = {start: start}

    order = list(nx.dfs_postorder_nodes(G, start))
    dfn = {u: i for i, u in enumerate(order)}
    order.pop()
    order.reverse()

    def intersect(u, v):
        while u != v:
            while dfn[u] < dfn[v]:
                u = idom[u]
            while dfn[u] > dfn[v]:
                v = idom[v]
        return u

    changed = True
    while changed:
        changed = False
        for u in order:
            new_idom = reduce(intersect, (v for v in G.pred[u] if v in idom))
            if u not in idom or idom[u] != new_idom:
                idom[u] = new_idom
                changed = True

    return idom

@nx._dispatchable
|
88 |
+
def dominance_frontiers(G, start):
|
89 |
+
"""Returns the dominance frontiers of all nodes of a directed graph.
|
90 |
+
|
91 |
+
Parameters
|
92 |
+
----------
|
93 |
+
G : a DiGraph or MultiDiGraph
|
94 |
+
The graph where dominance is to be computed.
|
95 |
+
|
96 |
+
start : node
|
97 |
+
The start node of dominance computation.
|
98 |
+
|
99 |
+
Returns
|
100 |
+
-------
|
101 |
+
df : dict keyed by nodes
|
102 |
+
A dict containing the dominance frontiers of each node reachable from
|
103 |
+
`start` as lists.
|
104 |
+
|
105 |
+
Raises
|
106 |
+
------
|
107 |
+
NetworkXNotImplemented
|
108 |
+
If `G` is undirected.
|
109 |
+
|
110 |
+
NetworkXError
|
111 |
+
If `start` is not in `G`.
|
112 |
+
|
113 |
+
Examples
|
114 |
+
--------
|
115 |
+
>>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
|
116 |
+
>>> sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items())
|
117 |
+
[(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])]
|
118 |
+
|
119 |
+
References
|
120 |
+
----------
|
121 |
+
.. [1] K. D. Cooper, T. J. Harvey, and K. Kennedy.
|
122 |
+
A simple, fast dominance algorithm.
|
123 |
+
Software Practice & Experience, 4:110, 2001.
|
124 |
+
"""
    idom = nx.immediate_dominators(G, start)

    df = {u: set() for u in idom}
    for u in idom:
        if len(G.pred[u]) >= 2:
            for v in G.pred[u]:
                if v in idom:
                    while v != idom[u]:
                        df[v].add(u)
                        v = idom[v]
    return df
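The immediate-dominator map is exactly the parent relation of the dominator tree, so the tree can be built directly from it. Illustrative sketch, not part of the diffed file:

>>> import networkx as nx
>>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
>>> idom = nx.immediate_dominators(G, 1)
>>> dom_tree = nx.DiGraph((parent, node) for node, parent in idom.items() if node != parent)
>>> sorted(dom_tree.edges)
[(1, 2), (1, 3), (1, 5), (3, 4)]
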
venv/lib/python3.10/site-packages/networkx/algorithms/dominating.py
ADDED
@@ -0,0 +1,94 @@
1 |
+
"""Functions for computing dominating sets in a graph."""
|
2 |
+
from itertools import chain
|
3 |
+
|
4 |
+
import networkx as nx
|
5 |
+
from networkx.utils import arbitrary_element
|
6 |
+
|
7 |
+
__all__ = ["dominating_set", "is_dominating_set"]
|
8 |
+
|
9 |
+
|
10 |
+
@nx._dispatchable
|
11 |
+
def dominating_set(G, start_with=None):
|
12 |
+
r"""Finds a dominating set for the graph G.
|
13 |
+
|
14 |
+
A *dominating set* for a graph with node set *V* is a subset *D* of
|
15 |
+
*V* such that every node not in *D* is adjacent to at least one
|
16 |
+
member of *D* [1]_.
|
17 |
+
|
18 |
+
Parameters
|
19 |
+
----------
|
20 |
+
G : NetworkX graph
|
21 |
+
|
22 |
+
start_with : node (default=None)
|
23 |
+
Node to use as a starting point for the algorithm.
|
24 |
+
|
25 |
+
Returns
|
26 |
+
-------
|
27 |
+
D : set
|
28 |
+
A dominating set for G.
|
29 |
+
|
30 |
+
Notes
|
31 |
+
-----
|
32 |
+
This function is an implementation of algorithm 7 in [2]_ which
|
33 |
+
finds some dominating set, not necessarily the smallest one.
|
34 |
+
|
35 |
+
See also
|
36 |
+
--------
|
37 |
+
is_dominating_set
|
38 |
+
|
39 |
+
References
|
40 |
+
----------
|
41 |
+
.. [1] https://en.wikipedia.org/wiki/Dominating_set
|
42 |
+
|
43 |
+
.. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
|
44 |
+
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
|
45 |
+
|
46 |
+
"""
    all_nodes = set(G)
    if start_with is None:
        start_with = arbitrary_element(all_nodes)
    if start_with not in G:
        raise nx.NetworkXError(f"node {start_with} is not in G")
    dominating_set = {start_with}
    dominated_nodes = set(G[start_with])
    remaining_nodes = all_nodes - dominated_nodes - dominating_set
    while remaining_nodes:
        # Choose an arbitrary node and determine its undominated neighbors.
        v = remaining_nodes.pop()
        undominated_nbrs = set(G[v]) - dominating_set
        # Add the node to the dominating set and the neighbors to the
        # dominated set. Finally, remove all of those nodes from the set
        # of remaining nodes.
        dominating_set.add(v)
        dominated_nodes |= undominated_nbrs
        remaining_nodes -= undominated_nbrs
    return dominating_set

@nx._dispatchable
|
69 |
+
def is_dominating_set(G, nbunch):
|
70 |
+
"""Checks if `nbunch` is a dominating set for `G`.
|
71 |
+
|
72 |
+
A *dominating set* for a graph with node set *V* is a subset *D* of
|
73 |
+
*V* such that every node not in *D* is adjacent to at least one
|
74 |
+
member of *D* [1]_.
|
75 |
+
|
76 |
+
Parameters
|
77 |
+
----------
|
78 |
+
G : NetworkX graph
|
79 |
+
|
80 |
+
nbunch : iterable
|
81 |
+
An iterable of nodes in the graph `G`.
|
82 |
+
|
83 |
+
See also
|
84 |
+
--------
|
85 |
+
dominating_set
|
86 |
+
|
87 |
+
References
|
88 |
+
----------
|
89 |
+
.. [1] https://en.wikipedia.org/wiki/Dominating_set
|
90 |
+
|
91 |
+
"""
    testset = {n for n in nbunch if n in G}
    nbrs = set(chain.from_iterable(G[n] for n in testset))
    return len(set(G) - testset - nbrs) == 0
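The two functions compose naturally: `dominating_set` produces a candidate and `is_dominating_set` verifies it. A minimal sketch, not part of the diffed file:

>>> import networkx as nx
>>> G = nx.path_graph(5)
>>> D = nx.dominating_set(G)
>>> nx.is_dominating_set(G, D)
True
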
venv/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py
ADDED
@@ -0,0 +1,168 @@
1 |
+
"""Provides functions for computing the efficiency of nodes and graphs."""
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
from networkx.exception import NetworkXNoPath
|
5 |
+
|
6 |
+
from ..utils import not_implemented_for
|
7 |
+
|
8 |
+
__all__ = ["efficiency", "local_efficiency", "global_efficiency"]
|
9 |
+
|
10 |
+
|
11 |
+
@not_implemented_for("directed")
|
12 |
+
@nx._dispatchable
|
13 |
+
def efficiency(G, u, v):
|
14 |
+
"""Returns the efficiency of a pair of nodes in a graph.
|
15 |
+
|
16 |
+
The *efficiency* of a pair of nodes is the multiplicative inverse of the
|
17 |
+
shortest path distance between the nodes [1]_. Returns 0 if no path
|
18 |
+
between nodes.
|
19 |
+
|
20 |
+
Parameters
|
21 |
+
----------
|
22 |
+
G : :class:`networkx.Graph`
|
23 |
+
An undirected graph for which to compute the average local efficiency.
|
24 |
+
u, v : node
|
25 |
+
Nodes in the graph ``G``.
|
26 |
+
|
27 |
+
Returns
|
28 |
+
-------
|
29 |
+
float
|
30 |
+
Multiplicative inverse of the shortest path distance between the nodes.
|
31 |
+
|
32 |
+
Examples
|
33 |
+
--------
|
34 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
35 |
+
>>> nx.efficiency(G, 2, 3) # this gives efficiency for node 2 and 3
|
36 |
+
0.5
|
37 |
+
|
38 |
+
Notes
|
39 |
+
-----
|
40 |
+
Edge weights are ignored when computing the shortest path distances.
|
41 |
+
|
42 |
+
See also
|
43 |
+
--------
|
44 |
+
local_efficiency
|
45 |
+
global_efficiency
|
46 |
+
|
47 |
+
References
|
48 |
+
----------
|
49 |
+
.. [1] Latora, Vito, and Massimo Marchiori.
|
50 |
+
"Efficient behavior of small-world networks."
|
51 |
+
*Physical Review Letters* 87.19 (2001): 198701.
|
52 |
+
<https://doi.org/10.1103/PhysRevLett.87.198701>
|
53 |
+
|
54 |
+
"""
    try:
        eff = 1 / nx.shortest_path_length(G, u, v)
    except NetworkXNoPath:
        eff = 0
    return eff

@not_implemented_for("directed")
|
63 |
+
@nx._dispatchable
|
64 |
+
def global_efficiency(G):
|
65 |
+
"""Returns the average global efficiency of the graph.
|
66 |
+
|
67 |
+
The *efficiency* of a pair of nodes in a graph is the multiplicative
|
68 |
+
inverse of the shortest path distance between the nodes. The *average
|
69 |
+
global efficiency* of a graph is the average efficiency of all pairs of
|
70 |
+
nodes [1]_.
|
71 |
+
|
72 |
+
Parameters
|
73 |
+
----------
|
74 |
+
G : :class:`networkx.Graph`
|
75 |
+
An undirected graph for which to compute the average global efficiency.
|
76 |
+
|
77 |
+
Returns
|
78 |
+
-------
|
79 |
+
float
|
80 |
+
The average global efficiency of the graph.
|
81 |
+
|
82 |
+
Examples
|
83 |
+
--------
|
84 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
85 |
+
>>> round(nx.global_efficiency(G), 12)
|
86 |
+
0.916666666667
|
87 |
+
|
88 |
+
Notes
|
89 |
+
-----
|
90 |
+
Edge weights are ignored when computing the shortest path distances.
|
91 |
+
|
92 |
+
See also
|
93 |
+
--------
|
94 |
+
local_efficiency
|
95 |
+
|
96 |
+
References
|
97 |
+
----------
|
98 |
+
.. [1] Latora, Vito, and Massimo Marchiori.
|
99 |
+
"Efficient behavior of small-world networks."
|
100 |
+
*Physical Review Letters* 87.19 (2001): 198701.
|
101 |
+
<https://doi.org/10.1103/PhysRevLett.87.198701>
|
102 |
+
|
103 |
+
"""
    n = len(G)
    denom = n * (n - 1)
    if denom != 0:
        lengths = nx.all_pairs_shortest_path_length(G)
        g_eff = 0
        for source, targets in lengths:
            for target, distance in targets.items():
                if distance > 0:
                    g_eff += 1 / distance
        g_eff /= denom
        # g_eff = sum(1 / d for s, tgts in lengths
        #                   for t, d in tgts.items() if d > 0) / denom
    else:
        g_eff = 0
    # TODO This can be made more efficient by computing all pairs shortest
    # path lengths in parallel.
    return g_eff

@not_implemented_for("directed")
|
124 |
+
@nx._dispatchable
|
125 |
+
def local_efficiency(G):
|
126 |
+
"""Returns the average local efficiency of the graph.
|
127 |
+
|
128 |
+
The *efficiency* of a pair of nodes in a graph is the multiplicative
|
129 |
+
inverse of the shortest path distance between the nodes. The *local
|
130 |
+
efficiency* of a node in the graph is the average global efficiency of the
|
131 |
+
subgraph induced by the neighbors of the node. The *average local
|
132 |
+
efficiency* is the average of the local efficiencies of each node [1]_.
|
133 |
+
|
134 |
+
Parameters
|
135 |
+
----------
|
136 |
+
G : :class:`networkx.Graph`
|
137 |
+
An undirected graph for which to compute the average local efficiency.
|
138 |
+
|
139 |
+
Returns
|
140 |
+
-------
|
141 |
+
float
|
142 |
+
The average local efficiency of the graph.
|
143 |
+
|
144 |
+
Examples
|
145 |
+
--------
|
146 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
147 |
+
>>> nx.local_efficiency(G)
|
148 |
+
0.9166666666666667
|
149 |
+
|
150 |
+
Notes
|
151 |
+
-----
|
152 |
+
Edge weights are ignored when computing the shortest path distances.
|
153 |
+
|
154 |
+
See also
|
155 |
+
--------
|
156 |
+
global_efficiency
|
157 |
+
|
158 |
+
References
|
159 |
+
----------
|
160 |
+
.. [1] Latora, Vito, and Massimo Marchiori.
|
161 |
+
"Efficient behavior of small-world networks."
|
162 |
+
*Physical Review Letters* 87.19 (2001): 198701.
|
163 |
+
<https://doi.org/10.1103/PhysRevLett.87.198701>
|
164 |
+
|
165 |
+
"""
    # TODO This summation can be trivially parallelized.
    efficiency_list = (global_efficiency(G.subgraph(G[v])) for v in G)
    return sum(efficiency_list) / len(G)
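Comparing the two measures on the same graph shows how local efficiency reflects neighborhood robustness while global efficiency reflects overall reachability. A small hedged sketch on a 4-cycle, not part of the diffed file:

>>> import networkx as nx
>>> G = nx.cycle_graph(4)
>>> round(nx.global_efficiency(G), 3)
0.833
>>> round(nx.local_efficiency(G), 3)
0.0
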
venv/lib/python3.10/site-packages/networkx/algorithms/euler.py
ADDED
@@ -0,0 +1,469 @@
"""
Eulerian circuits and graphs.
"""
from itertools import combinations

import networkx as nx

from ..utils import arbitrary_element, not_implemented_for

__all__ = [
    "is_eulerian",
    "eulerian_circuit",
    "eulerize",
    "is_semieulerian",
    "has_eulerian_path",
    "eulerian_path",
]


@nx._dispatchable
def is_eulerian(G):
    """Returns True if and only if `G` is Eulerian.

    A graph is *Eulerian* if it has an Eulerian circuit. An *Eulerian
    circuit* is a closed walk that includes each edge of a graph exactly
    once.

    Graphs with isolated vertices (i.e. vertices with zero degree) are not
    considered to have Eulerian circuits. Therefore, if the graph is not
    connected (or not strongly connected, for directed graphs), this function
    returns False.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.

    Examples
    --------
    >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]}))
    True
    >>> nx.is_eulerian(nx.complete_graph(5))
    True
    >>> nx.is_eulerian(nx.petersen_graph())
    False

    If you prefer to allow graphs with isolated vertices to have Eulerian circuits,
    you can first remove such vertices and then call `is_eulerian`, as the example
    below shows.

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.is_eulerian(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.is_eulerian(G)
    True

    """
    if G.is_directed():
        # Every node must have equal in degree and out degree and the
        # graph must be strongly connected
        return all(
            G.in_degree(n) == G.out_degree(n) for n in G
        ) and nx.is_strongly_connected(G)
    # An undirected Eulerian graph has no vertices of odd degree and
    # must be connected.
    return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G)


@nx._dispatchable
def is_semieulerian(G):
    """Return True iff `G` is semi-Eulerian.

    G is semi-Eulerian if it has an Eulerian path but no Eulerian circuit.

    See Also
    --------
    has_eulerian_path
    is_eulerian
    """
    return has_eulerian_path(G) and not is_eulerian(G)


def _find_path_start(G):
    """Return a suitable starting vertex for an Eulerian path.

    If no path exists, return None.
    """
    if not has_eulerian_path(G):
        return None

    if is_eulerian(G):
        return arbitrary_element(G)

    if G.is_directed():
        v1, v2 = (v for v in G if G.in_degree(v) != G.out_degree(v))
        # Determines which is the 'start' node (as opposed to the 'end')
        if G.out_degree(v1) > G.in_degree(v1):
            return v1
        else:
            return v2

    else:
        # In an undirected graph randomly choose one of the possibilities
        start = [v for v in G if G.degree(v) % 2 != 0][0]
        return start


def _simplegraph_eulerian_circuit(G, source):
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [source]
    last_vertex = None
    while vertex_stack:
        current_vertex = vertex_stack[-1]
        if degree(current_vertex) == 0:
            if last_vertex is not None:
                yield (last_vertex, current_vertex)
            last_vertex = current_vertex
            vertex_stack.pop()
        else:
            _, next_vertex = arbitrary_element(edges(current_vertex))
            vertex_stack.append(next_vertex)
            G.remove_edge(current_vertex, next_vertex)


def _multigraph_eulerian_circuit(G, source):
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [(source, None)]
    last_vertex = None
    last_key = None
    while vertex_stack:
        current_vertex, current_key = vertex_stack[-1]
        if degree(current_vertex) == 0:
            if last_vertex is not None:
                yield (last_vertex, current_vertex, last_key)
            last_vertex, last_key = current_vertex, current_key
            vertex_stack.pop()
        else:
            triple = arbitrary_element(edges(current_vertex, keys=True))
            _, next_vertex, next_key = triple
            vertex_stack.append((next_vertex, next_key))
            G.remove_edge(current_vertex, next_vertex, next_key)


@nx._dispatchable
def eulerian_circuit(G, source=None, keys=False):
    """Returns an iterator over the edges of an Eulerian circuit in `G`.

    An *Eulerian circuit* is a closed walk that includes each edge of a
    graph exactly once.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.

    source : node, optional
        Starting node for circuit.

    keys : bool
        If False, edges generated by this function will be of the form
        ``(u, v)``. Otherwise, edges will be of the form ``(u, v, k)``.
        This option is ignored unless `G` is a multigraph.

    Returns
    -------
    edges : iterator
        An iterator over edges in the Eulerian circuit.

    Raises
    ------
    NetworkXError
        If the graph is not Eulerian.

    See Also
    --------
    is_eulerian

    Notes
    -----
    This is a linear time implementation of an algorithm adapted from [1]_.

    For general information about Euler tours, see [2]_.

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path

    Examples
    --------
    To get an Eulerian circuit in an undirected graph::

        >>> G = nx.complete_graph(3)
        >>> list(nx.eulerian_circuit(G))
        [(0, 2), (2, 1), (1, 0)]
        >>> list(nx.eulerian_circuit(G, source=1))
        [(1, 2), (2, 0), (0, 1)]

    To get the sequence of vertices in an Eulerian circuit::

        >>> [u for u, v in nx.eulerian_circuit(G)]
        [0, 2, 1]

    """
    if not is_eulerian(G):
        raise nx.NetworkXError("G is not Eulerian.")
    if G.is_directed():
        G = G.reverse()
    else:
        G = G.copy()
    if source is None:
        source = arbitrary_element(G)
    if G.is_multigraph():
        for u, v, k in _multigraph_eulerian_circuit(G, source):
            if keys:
                yield u, v, k
            else:
                yield u, v
    else:
        yield from _simplegraph_eulerian_circuit(G, source)


@nx._dispatchable
def has_eulerian_path(G, source=None):
    """Return True iff `G` has an Eulerian path.

    An Eulerian path is a path in a graph which uses each edge of a graph
    exactly once. If `source` is specified, then this function checks
    whether an Eulerian path that starts at node `source` exists.

    A directed graph has an Eulerian path iff:
        - at most one vertex has out_degree - in_degree = 1,
        - at most one vertex has in_degree - out_degree = 1,
        - every other vertex has equal in_degree and out_degree,
        - and all of its vertices belong to a single connected
          component of the underlying undirected graph.

    If `source` is not None, an Eulerian path starting at `source` exists if no
    other node has out_degree - in_degree = 1. This is equivalent to either
    there exists an Eulerian circuit or `source` has out_degree - in_degree = 1
    and the conditions above hold.

    An undirected graph has an Eulerian path iff:
        - exactly zero or two vertices have odd degree,
        - and all of its vertices belong to a single connected component.

    If `source` is not None, an Eulerian path starting at `source` exists if
    either there exists an Eulerian circuit or `source` has an odd degree and the
    conditions above hold.

    Graphs with isolated vertices (i.e. vertices with zero degree) are not considered
    to have an Eulerian path. Therefore, if the graph is not connected (or not strongly
    connected, for directed graphs), this function returns False.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to find an Euler path in.

    source : node, optional
        Starting node for path.

    Returns
    -------
    Bool : True if G has an Eulerian path.

    Examples
    --------
    If you prefer to allow graphs with isolated vertices to have an Eulerian path,
    you can first remove such vertices and then call `has_eulerian_path`, as the
    example below shows.

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.has_eulerian_path(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.has_eulerian_path(G)
    True

    See Also
    --------
    is_eulerian
    eulerian_path
    """
    if nx.is_eulerian(G):
        return True

    if G.is_directed():
        ins = G.in_degree
        outs = G.out_degree
        # Since we know it is not eulerian, outs - ins must be 1 for source
        if source is not None and outs[source] - ins[source] != 1:
            return False

        unbalanced_ins = 0
        unbalanced_outs = 0
        for v in G:
            if ins[v] - outs[v] == 1:
                unbalanced_ins += 1
            elif outs[v] - ins[v] == 1:
                unbalanced_outs += 1
            elif ins[v] != outs[v]:
                return False

        return (
            unbalanced_ins <= 1 and unbalanced_outs <= 1 and nx.is_weakly_connected(G)
        )
    else:
        # We know it is not eulerian, so degree of source must be odd.
        if source is not None and G.degree[source] % 2 != 1:
            return False

        # Sum is 2 since we know it is not eulerian (which implies sum is 0)
        return sum(d % 2 == 1 for v, d in G.degree()) == 2 and nx.is_connected(G)


@nx._dispatchable
def eulerian_path(G, source=None, keys=False):
    """Return an iterator over the edges of an Eulerian path in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        The graph in which to look for an eulerian path.
    source : node or None (default: None)
        The node at which to start the search. None means search over all
        starting nodes.
    keys : Bool (default: False)
        Indicates whether to yield edge 3-tuples (u, v, edge_key).
        The default yields edge 2-tuples.

    Yields
    ------
    Edge tuples along the eulerian path.

    Warning: If the `source` provided is not the start node of an Euler path,
    this function raises an error even if an Euler path exists.
    """
    if not has_eulerian_path(G, source):
        raise nx.NetworkXError("Graph has no Eulerian paths.")
    if G.is_directed():
        G = G.reverse()
        if source is None or nx.is_eulerian(G) is False:
            source = _find_path_start(G)
        if G.is_multigraph():
            for u, v, k in _multigraph_eulerian_circuit(G, source):
                if keys:
                    yield u, v, k
                else:
                    yield u, v
        else:
            yield from _simplegraph_eulerian_circuit(G, source)
    else:
        G = G.copy()
        if source is None:
            source = _find_path_start(G)
        if G.is_multigraph():
            if keys:
                yield from reversed(
                    [(v, u, k) for u, v, k in _multigraph_eulerian_circuit(G, source)]
                )
            else:
                yield from reversed(
                    [(v, u) for u, v, k in _multigraph_eulerian_circuit(G, source)]
                )
        else:
            yield from reversed(
                [(v, u) for u, v in _simplegraph_eulerian_circuit(G, source)]
            )


@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def eulerize(G):
    """Transforms a graph into an Eulerian graph.

    If `G` is Eulerian the result is `G` as a MultiGraph, otherwise the result is a smallest
    (in terms of the number of edges) multigraph whose underlying simple graph is `G`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph

    Returns
    -------
    G : NetworkX multigraph

    Raises
    ------
    NetworkXError
        If the graph is not connected.

    See Also
    --------
    is_eulerian
    eulerian_circuit

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path
    .. [3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf

    Examples
    --------
    >>> G = nx.complete_graph(10)
    >>> H = nx.eulerize(G)
    >>> nx.is_eulerian(H)
    True

    """
    if G.order() == 0:
        raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph")
    if not nx.is_connected(G):
        raise nx.NetworkXError("G is not connected")
    odd_degree_nodes = [n for n, d in G.degree() if d % 2 == 1]
    G = nx.MultiGraph(G)
    if len(odd_degree_nodes) == 0:
        return G

    # get all shortest paths between vertices of odd degree
    odd_deg_pairs_paths = [
        (m, {n: nx.shortest_path(G, source=m, target=n)})
        for m, n in combinations(odd_degree_nodes, 2)
    ]

    # use the number of vertices in a graph + 1 as an upper bound on
    # the maximum length of a path in G
    upper_bound_on_max_path_length = len(G) + 1

    # use "len(G) + 1 - len(P)",
    # where P is a shortest path between vertices n and m,
    # as edge-weights in a new graph
    # store the paths in the graph for easy indexing later
    Gp = nx.Graph()
    for n, Ps in odd_deg_pairs_paths:
        for m, P in Ps.items():
            if n != m:
                Gp.add_edge(
                    m, n, weight=upper_bound_on_max_path_length - len(P), path=P
                )

    # find the minimum weight matching of edges in the weighted graph
    best_matching = nx.Graph(list(nx.max_weight_matching(Gp)))

    # duplicate each edge along each path in the set of paths in Gp
    for m, n in best_matching.edges():
        path = Gp[m][n]["path"]
        G.add_edges_from(nx.utils.pairwise(path))
    return G
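The Eulerian module above exposes a small, composable API. The following is a minimal usage sketch, not part of the diff; it assumes `networkx` is importable as `nx`, and the graphs chosen are purely illustrative.

import networkx as nx

# A 4-cycle is Eulerian: every vertex has even degree and the graph is connected.
C4 = nx.cycle_graph(4)
assert nx.is_eulerian(C4)
print(list(nx.eulerian_circuit(C4, source=0)))  # e.g. [(0, 3), (3, 2), (2, 1), (1, 0)]

# A path graph is only semi-Eulerian: it has an Eulerian path but no circuit.
P4 = nx.path_graph(4)
assert nx.is_semieulerian(P4) and nx.has_eulerian_path(P4)

# eulerize duplicates edges along shortest paths between odd-degree nodes
# until every degree is even, so the returned multigraph always admits a circuit.
H = nx.eulerize(P4)
assert nx.is_eulerian(H)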
venv/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py
ADDED
@@ -0,0 +1,322 @@
"""
Functions for hashing graphs to strings.
Isomorphic graphs should be assigned identical hashes.
For now, only Weisfeiler-Lehman hashing is implemented.
"""

from collections import Counter, defaultdict
from hashlib import blake2b

import networkx as nx

__all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"]


def _hash_label(label, digest_size):
    return blake2b(label.encode("ascii"), digest_size=digest_size).hexdigest()


def _init_node_labels(G, edge_attr, node_attr):
    if node_attr:
        return {u: str(dd[node_attr]) for u, dd in G.nodes(data=True)}
    elif edge_attr:
        return {u: "" for u in G}
    else:
        return {u: str(deg) for u, deg in G.degree()}


def _neighborhood_aggregate(G, node, node_labels, edge_attr=None):
    """
    Compute new labels for given node by aggregating
    the labels of each node's neighbors.
    """
    label_list = []
    for nbr in G.neighbors(node):
        prefix = "" if edge_attr is None else str(G[node][nbr][edge_attr])
        label_list.append(prefix + node_labels[nbr])
    return node_labels[node] + "".join(sorted(label_list))


@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_graph_hash(
    G, edge_attr=None, node_attr=None, iterations=3, digest_size=16
):
    """Return Weisfeiler Lehman (WL) graph hash.

    The function iteratively aggregates and hashes neighborhoods of each node.
    After each node's neighbors are hashed to obtain updated node labels,
    a hashed histogram of resulting labels is returned as the final hash.

    Hashes are identical for isomorphic graphs and there are strong guarantees
    that non-isomorphic graphs will get different hashes. See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr : string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
    iterations : int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size : int, optional (default=16)
        Size (in bits) of blake2b hash digest to use for hashing node labels.

    Returns
    -------
    h : string
        Hexadecimal string corresponding to hash of the input graph.

    Examples
    --------
    Two graphs with edge attributes that are isomorphic, except for
    differences in the edge labels.

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from(
    ...     [
    ...         (1, 2, {"label": "A"}),
    ...         (2, 3, {"label": "A"}),
    ...         (3, 1, {"label": "A"}),
    ...         (1, 4, {"label": "B"}),
    ...     ]
    ... )
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from(
    ...     [
    ...         (5, 6, {"label": "B"}),
    ...         (6, 7, {"label": "A"}),
    ...         (7, 5, {"label": "A"}),
    ...         (7, 8, {"label": "A"}),
    ...     ]
    ... )

    Omitting the `edge_attr` option results in identical hashes.

    >>> nx.weisfeiler_lehman_graph_hash(G1)
    '7bc4dde9a09d0b94c5097b219891d81a'
    >>> nx.weisfeiler_lehman_graph_hash(G2)
    '7bc4dde9a09d0b94c5097b219891d81a'

    With edge labels, the graphs are no longer assigned
    the same hash digest.

    >>> nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label")
    'c653d85538bcf041d88c011f4f905f10'
    >>> nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label")
    '3dcd84af1ca855d0eff3c978d88e7ec7'

    Notes
    -----
    To return the WL hashes of each subgraph of a graph, use
    `weisfeiler_lehman_subgraph_hashes`

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf

    See also
    --------
    weisfeiler_lehman_subgraph_hashes
    """

    def weisfeiler_lehman_step(G, labels, edge_attr=None):
        """
        Apply neighborhood aggregation to each node
        in the graph.
        Computes a dictionary with labels for each node.
        """
        new_labels = {}
        for node in G.nodes():
            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
            new_labels[node] = _hash_label(label, digest_size)
        return new_labels

    # set initial node labels
    node_labels = _init_node_labels(G, edge_attr, node_attr)

    subgraph_hash_counts = []
    for _ in range(iterations):
        node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr)
        counter = Counter(node_labels.values())
        # sort the counter, extend total counts
        subgraph_hash_counts.extend(sorted(counter.items(), key=lambda x: x[0]))

    # hash the final counter
    return _hash_label(str(tuple(subgraph_hash_counts)), digest_size)


@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_subgraph_hashes(
    G,
    edge_attr=None,
    node_attr=None,
    iterations=3,
    digest_size=16,
    include_initial_labels=False,
):
    """
    Return a dictionary of subgraph hashes by node.

    Dictionary keys are nodes in `G`, and values are a list of hashes.
    Each hash corresponds to a subgraph rooted at a given node u in `G`.
    Lists of subgraph hashes are sorted in increasing order of depth from
    their root node, with the hash at index i corresponding to a subgraph
    of nodes at most i edges distance from u. Thus, each list will contain
    `iterations` elements - a hash for a subgraph at each depth. If
    `include_initial_labels` is set to `True`, each list will additionally
    contain a hash of the initial node label (or equivalently a
    subgraph of depth 0) prepended, totalling ``iterations + 1`` elements.

    The function iteratively aggregates and hashes neighborhoods of each node.
    This is achieved for each step by replacing for each node its label from
    the previous iteration with its hashed 1-hop neighborhood aggregate.
    The new node label is then appended to a list of node labels for each
    node.

    To aggregate neighborhoods for a node $u$ at each step, all labels of
    nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set,
    labels for each neighboring node are prefixed with the value of this attribute
    along the connecting edge from this neighbor to node $u$. The resulting string
    is then hashed to compress this information into a fixed digest size.

    Thus, at the $i$-th iteration, nodes within $i$ hops influence any given
    hashed node label. We can therefore say that at depth $i$ for node $u$
    we have a hash for a subgraph induced by the $i$-hop neighborhood of $u$.

    The output can be used to create general Weisfeiler-Lehman graph kernels,
    or generate features for graphs or nodes - for example to generate 'words' in
    a graph as seen in the 'graph2vec' algorithm.
    See [1]_ & [2]_ respectively for details.

    Hashes are identical for isomorphic subgraphs and there exist strong
    guarantees that non-isomorphic graphs will get different hashes.
    See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr : string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
        If None, and edge_attr is given, each node starts with an identical label.
    iterations : int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size : int, optional (default=16)
        Size (in bits) of blake2b hash digest to use for hashing node labels.
        The default size is 16 bits.
    include_initial_labels : bool, optional (default=False)
        If True, include the hashed initial node label as the first subgraph
        hash for each node.

    Returns
    -------
    node_subgraph_hashes : dict
        A dictionary with each key given by a node in G, and each value given
        by the subgraph hashes in order of depth from the key node.

    Examples
    --------
    Finding similar nodes in different graphs:

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)])
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)])
    >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes(G1, iterations=3, digest_size=8)
    >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes(G2, iterations=3, digest_size=8)

    Even though G1 and G2 are not isomorphic (they have different numbers of edges),
    the hash sequences of depth 3 for node 1 in G1 and node 5 in G2 are similar:

    >>> g1_hashes[1]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '57872a7d2059c1c0']
    >>> g2_hashes[5]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc']

    The first 2 WL subgraph hashes match. From this we can conclude that it's very
    likely the neighborhoods of 2 hops around these nodes are isomorphic.

    However the 3-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the
    3rd hashes in the lists above are not equal.

    These nodes may be candidates to be classified together since their local topology
    is similar.

    Notes
    -----
    To hash the full graph when subgraph hashes are not needed, use
    `weisfeiler_lehman_graph_hash` for efficiency.

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf
    .. [2] Annamalai Narayanan, Mahinthan Chandramohan, Rajasekar Venkatesan,
       Lihui Chen, Yang Liu and Shantanu Jaiswa. graph2vec: Learning
       Distributed Representations of Graphs. arXiv. 2017
       https://arxiv.org/pdf/1707.05005.pdf

    See also
    --------
    weisfeiler_lehman_graph_hash
    """

    def weisfeiler_lehman_step(G, labels, node_subgraph_hashes, edge_attr=None):
        """
        Apply neighborhood aggregation to each node
        in the graph.
        Computes a dictionary with labels for each node.
        Appends the new hashed label to the dictionary of subgraph hashes
        originating from and indexed by each node in G
        """
        new_labels = {}
        for node in G.nodes():
            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
            hashed_label = _hash_label(label, digest_size)
            new_labels[node] = hashed_label
            node_subgraph_hashes[node].append(hashed_label)
        return new_labels

    node_labels = _init_node_labels(G, edge_attr, node_attr)
    if include_initial_labels:
        node_subgraph_hashes = {
            k: [_hash_label(v, digest_size)] for k, v in node_labels.items()
        }
    else:
        node_subgraph_hashes = defaultdict(list)

    for _ in range(iterations):
        node_labels = weisfeiler_lehman_step(
            G, node_labels, node_subgraph_hashes, edge_attr
        )

    return dict(node_subgraph_hashes)
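A short sketch (not part of the diff) of how the two hashing entry points above relate; the graphs are illustrative and the exact digests depend only on structure and the chosen `digest_size`.

import networkx as nx

# Two encodings of the same 5-cycle hash identically, since WL hashing only
# sees structure (degrees are the default initial labels)...
G1 = nx.cycle_graph(5)
G2 = nx.relabel_nodes(G1, {i: chr(97 + i) for i in range(5)})
assert nx.weisfeiler_lehman_graph_hash(G1) == nx.weisfeiler_lehman_graph_hash(G2)

# ...while a structurally different graph gets a different hash here.
G3 = nx.path_graph(5)
assert nx.weisfeiler_lehman_graph_hash(G1) != nx.weisfeiler_lehman_graph_hash(G3)

# Per-node subgraph hashes: one hash per node per iteration (depth).
hashes = nx.weisfeiler_lehman_subgraph_hashes(G3, iterations=2, digest_size=8)
assert all(len(v) == 2 for v in hashes.values())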
venv/lib/python3.10/site-packages/networkx/algorithms/graphical.py
ADDED
@@ -0,0 +1,483 @@
"""Test sequences for graphiness.
"""
import heapq

import networkx as nx

__all__ = [
    "is_graphical",
    "is_multigraphical",
    "is_pseudographical",
    "is_digraphical",
    "is_valid_degree_sequence_erdos_gallai",
    "is_valid_degree_sequence_havel_hakimi",
]


@nx._dispatchable(graphs=None)
def is_graphical(sequence, method="eg"):
    """Returns True if sequence is a valid degree sequence.

    A degree sequence is valid if some graph can realize it.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    method : "eg" | "hh" (default: 'eg')
        The method used to validate the degree sequence.
        "eg" corresponds to the Erdős-Gallai algorithm
        [EG1960]_, [choudum1986]_, and
        "hh" to the Havel-Hakimi algorithm
        [havel1955]_, [hakimi1962]_, [CL1996]_.

    Returns
    -------
    valid : bool
        True if the sequence is a valid degree sequence and False if not.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> sequence = (d for n, d in G.degree())
    >>> nx.is_graphical(sequence)
    True

    To test a non-graphical sequence:
    >>> sequence_list = [d for n, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_graphical(sequence_list)
    False

    References
    ----------
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
       graph sequences." Bulletin of the Australian Mathematical Society, 33,
       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    if method == "eg":
        valid = is_valid_degree_sequence_erdos_gallai(list(sequence))
    elif method == "hh":
        valid = is_valid_degree_sequence_havel_hakimi(list(sequence))
    else:
        msg = "`method` must be 'eg' or 'hh'"
        raise nx.NetworkXException(msg)
    return valid


def _basic_graphical_tests(deg_sequence):
    # Sort and perform some simple tests on the sequence
    deg_sequence = nx.utils.make_list_of_ints(deg_sequence)
    p = len(deg_sequence)
    num_degs = [0] * p
    dmax, dmin, dsum, n = 0, p, 0, 0
    for d in deg_sequence:
        # Reject if degree is negative or larger than the sequence length
        if d < 0 or d >= p:
            raise nx.NetworkXUnfeasible
        # Process only the non-zero integers
        elif d > 0:
            dmax, dmin, dsum, n = max(dmax, d), min(dmin, d), dsum + d, n + 1
            num_degs[d] += 1
    # Reject sequence if it has odd sum or is oversaturated
    if dsum % 2 or dsum > n * (n - 1):
        raise nx.NetworkXUnfeasible
    return dmax, dmin, dsum, n, num_degs


@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_havel_hakimi(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation proceeds using the Havel-Hakimi theorem
    [havel1955]_, [hakimi1962]_, [CL1996]_.
    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.

    Parameters
    ----------
    deg_sequence : list
        A list of integers where each element specifies the degree of a node
        in a graph.

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
    False

    Notes
    -----
    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical. This was shown in Theorem 6 in [1]_.

    References
    ----------
    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    modstubs = [0] * (dmax + 1)
    # Successively reduce degree sequence by removing the maximum degree
    while n > 0:
        # Retrieve the maximum degree in the sequence
        while num_degs[dmax] == 0:
            dmax -= 1
        # If there are not enough stubs to connect to, then the sequence is
        # not graphical
        if dmax > n - 1:
            return False

        # Remove largest stub in list
        num_degs[dmax], n = num_degs[dmax] - 1, n - 1
        # Reduce the next dmax largest stubs
        mslen = 0
        k = dmax
        for i in range(dmax):
            while num_degs[k] == 0:
                k -= 1
            num_degs[k], n = num_degs[k] - 1, n - 1
            if k > 1:
                modstubs[mslen] = k - 1
                mslen += 1
        # Add back to the list any non-zero stubs that were removed
        for i in range(mslen):
            stub = modstubs[i]
            num_degs[stub], n = num_degs[stub] + 1, n + 1
    return True


@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_erdos_gallai(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation is done using the Erdős-Gallai theorem [EG1960]_.

    Parameters
    ----------
    deg_sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
    False

    Notes
    -----

    This implementation uses an equivalent form of the Erdős-Gallai criterion.
    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    Specifically, a sequence d is graphical if and only if the
    sum of the sequence is even and for all strong indices k in the sequence,

    .. math::

       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k)
             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )

    A strong index k is any index where d_k >= k and the value n_j is the
    number of occurrences of j in d. The maximal strong index is called the
    Durfee index.

    This particular rearrangement comes from the proof of Theorem 3 in [2]_.

    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical. This was shown in Theorem 6 in [2]_.

    References
    ----------
    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
       Discrete Mathematics, 265, pp. 417-420 (2003).
    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Perform the EG checks using the reformulation of Zverovich and Zverovich
    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
    for dk in range(dmax, dmin - 1, -1):
        if dk < k + 1:  # Check if already past Durfee index
            return True
        if num_degs[dk] > 0:
            run_size = num_degs[dk]  # Process a run of identical-valued degrees
            if dk < k + run_size:  # Check if end of run is past Durfee index
                run_size = dk - k  # Adjust back to Durfee index
            sum_deg += run_size * dk
            for v in range(run_size):
                sum_nj += num_degs[k + v]
                sum_jnj += (k + v) * num_degs[k + v]
            k += run_size
            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
                return False
    return True


@nx._dispatchable(graphs=None)
def is_multigraphical(sequence):
    """Returns True if some multigraph can realize the sequence.

    Parameters
    ----------
    sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is a multigraphic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_multigraphical(sequence)
    True

    To test a non-multigraphical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_multigraphical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    References
    ----------
    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
       (1962).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    dsum, dmax = 0, 0
    for d in deg_sequence:
        if d < 0:
            return False
        dsum, dmax = dsum + d, max(dmax, d)
    if dsum % 2 or dsum < 2 * dmax:
        return False
    return True


@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_).

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
        True if the sequence is a pseudographic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_pseudographical(sequence)
    True

    To test a non-pseudographical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_pseudographical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0


@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
        True if in- and out-sequences are digraphic, False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    maxn = max(nin, nout)
    maxin = 0
    if maxn == 0:
        return True
    stubheap, zeroheap = [], []
    for n in range(maxn):
        in_deg, out_deg = 0, 0
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum out degree
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
            if stubout == 0:
                return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
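A brief sketch (not part of the diff) of the realizability tests above on hand-written degree sequences; the sequences are illustrative.

import networkx as nx

# [3, 3, 2, 2, 2] has even sum and passes Erdős-Gallai, so a simple graph exists.
seq = [3, 3, 2, 2, 2]
assert nx.is_graphical(seq, method="eg") and nx.is_graphical(seq, method="hh")

# [4, 1, 1] fails for simple graphs (a degree-4 node needs 4 distinct neighbours)
# and for multigraphs (sum < 2 * max degree), but a pseudograph with self-loops
# can still realize it.
assert not nx.is_graphical([4, 1, 1])
assert not nx.is_multigraphical([4, 1, 1])
assert nx.is_pseudographical([4, 1, 1])

# Directed realizability checks an in-degree sequence against an out-degree sequence.
assert nx.is_digraphical([0, 1, 1], [1, 1, 0])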
venv/lib/python3.10/site-packages/networkx/algorithms/hybrid.py
ADDED
@@ -0,0 +1,195 @@
"""
Provides functions for finding and testing for locally `(k, l)`-connected
graphs.

"""
import copy

import networkx as nx

__all__ = ["kl_connected_subgraph", "is_kl_connected"]


@nx._dispatchable(returns_graph=True)
def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False):
    """Returns the maximum locally `(k, l)`-connected subgraph of `G`.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph in which to find a maximum locally `(k, l)`-connected
        subgraph.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    same_as_graph : bool
        If True then return a tuple of the form `(H, is_same)`,
        where `H` is the maximum locally `(k, l)`-connected subgraph and
        `is_same` is a Boolean representing whether `G` is locally `(k,
        l)`-connected (and hence, whether `H` is simply a copy of the input
        graph `G`).

    Returns
    -------
    NetworkX graph or two-tuple
        If `same_as_graph` is True, then this function returns a
        two-tuple as described above. Otherwise, it returns only the maximum
        locally `(k, l)`-connected subgraph.

    See also
    --------
    is_kl_connected

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    H = copy.deepcopy(G)  # subgraph we construct by removing from G

    graphOK = True
    deleted_some = True  # hack to start off the while loop
    while deleted_some:
        deleted_some = False
        # We use `for edge in list(H.edges()):` instead of
        # `for edge in H.edges():` because we edit the graph `H` in
        # the loop. Hence using an iterator will result in
        # `RuntimeError: dictionary changed size during iteration`
        for edge in list(H.edges()):
            (u, v) = edge
            # Get copy of graph needed for this search
            if low_memory:
                verts = {u, v}
                for i in range(k):
                    for w in verts.copy():
                        verts.update(G[w])
                G2 = G.subgraph(verts).copy()
            else:
                G2 = copy.deepcopy(G)
            ###
            path = [u, v]
            cnt = 0
            accept = 0
            while path:
                cnt += 1  # Found a path
                if cnt >= l:
                    accept = 1
                    break
                # record edges along this graph
                prev = u
                for w in path:
                    if prev != w:
                        G2.remove_edge(prev, w)
                    prev = w
                # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
                try:
                    path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
                except nx.NetworkXNoPath:
                    path = False
            # No Other Paths
            if accept == 0:
                H.remove_edge(u, v)
                deleted_some = True
                if graphOK:
                    graphOK = False
    # We looked through all edges and removed none of them.
    # So, H is the maximal (k,l)-connected subgraph of G
    if same_as_graph:
        return (H, graphOK)
    return H


@nx._dispatchable
def is_kl_connected(G, k, l, low_memory=False):
    """Returns True if and only if `G` is locally `(k, l)`-connected.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph to test for local `(k, l)`-connectedness.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    Returns
    -------
    bool
        Whether the graph is locally `(k, l)`-connected.

    See also
    --------
    kl_connected_subgraph

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    graphOK = True
    for edge in G.edges():
        (u, v) = edge
        # Get copy of graph needed for this search
        if low_memory:
            verts = {u, v}
            for i in range(k):
                [verts.update(G.neighbors(w)) for w in verts.copy()]
            G2 = G.subgraph(verts)
        else:
            G2 = copy.deepcopy(G)
        ###
        path = [u, v]
        cnt = 0
        accept = 0
        while path:
            cnt += 1  # Found a path
            if cnt >= l:
                accept = 1
                break
            # record edges along this graph
            prev = u
            for w in path:
                if w != prev:
                    G2.remove_edge(prev, w)
                prev = w
            # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
            try:
                path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
            except nx.NetworkXNoPath:
                path = False
        # No Other Paths
        if accept == 0:
            graphOK = False
            break
    # return status
    return graphOK
venv/lib/python3.10/site-packages/networkx/algorithms/matching.py
ADDED
@@ -0,0 +1,1151 @@
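The body of this file is not rendered in this view. The `eulerize` routine earlier in the diff pairs up odd-degree vertices with `nx.max_weight_matching`, which this module provides; a minimal sketch of that call on an illustrative weighted graph follows.

import networkx as nx

# Heavier edges are preferred; the matching is returned as a set of node pairs.
G = nx.Graph()
G.add_edge("a", "b", weight=3)
G.add_edge("b", "c", weight=1)
G.add_edge("c", "d", weight=3)
matching = nx.max_weight_matching(G)
assert nx.is_matching(G, matching)
assert {frozenset(e) for e in matching} == {frozenset({"a", "b"}), frozenset({"c", "d"})}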
"""Functions for computing and verifying matchings in a graph."""
from collections import Counter
from itertools import combinations, repeat

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = [
    "is_matching",
    "is_maximal_matching",
    "is_perfect_matching",
    "max_weight_matching",
    "min_weight_matching",
    "maximal_matching",
]


@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable
def maximal_matching(G):
    r"""Find a maximal matching in the graph.

    A matching is a subset of edges in which no node occurs more than once.
    A maximal matching cannot add more edges and still be a matching.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    matching : set
        A maximal matching of the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> sorted(nx.maximal_matching(G))
    [(1, 2), (3, 5)]

    Notes
    -----
    The algorithm greedily selects a maximal matching M of the graph G
    (i.e. no superset of M exists). It runs in $O(|E|)$ time.
    """
    matching = set()
    nodes = set()
    for edge in G.edges():
        # If the edge isn't covered, add it to the matching
        # then remove neighborhood of u and v from consideration.
        u, v = edge
        if u not in nodes and v not in nodes and u != v:
            matching.add(edge)
            nodes.update(edge)
    return matching


def matching_dict_to_set(matching):
    """Converts matching dict format to matching set format

    Converts a dictionary representing a matching (as returned by
    :func:`max_weight_matching`) to a set representing a matching (as
    returned by :func:`maximal_matching`).

    In the definition of maximal matching adopted by NetworkX,
    self-loops are not allowed, so the provided dictionary is expected
    to never have any mapping from a key to itself. However, the
    dictionary is expected to have mirrored key/value pairs, for
    example, key ``u`` with value ``v`` and key ``v`` with value ``u``.

    """
    edges = set()
    for edge in matching.items():
        u, v = edge
        if (v, u) in edges or edge in edges:
            continue
        if u == v:
            raise nx.NetworkXError(f"Selfloops cannot appear in matchings {edge}")
        edges.add(edge)
    return edges


@nx._dispatchable
def is_matching(G, matching):
    """Return True if ``matching`` is a valid matching of ``G``

    A *matching* in a graph is a set of edges in which no two distinct
    edges share a common endpoint. Each node is incident to at most one
    edge in the matching. The edges are said to be independent.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid matching
        in the graph.

    Raises
    ------
    NetworkXError
        If the proposed matching has an edge to a node not in G.
        Or if the matching is not a collection of 2-tuple edges.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> nx.is_maximal_matching(G, {1: 3, 2: 4})  # using dict to represent matching
    True

    >>> nx.is_matching(G, {(1, 3), (2, 4)})  # using set to represent matching
    True

    """
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    nodes = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        if u == v:
            return False
        if not G.has_edge(u, v):
            return False
        if u in nodes or v in nodes:
            return False
        nodes.update(edge)
    return True
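`matching_dict_to_set` above has no doctest, so here is a short illustrative sketch (not part of the vendored module; the sample graph and dict are my own) of the dict-to-set conversion it performs and of passing the dict form to `is_matching` directly:

import networkx as nx
from networkx.algorithms.matching import matching_dict_to_set

# The dict form carries every matched edge twice (u -> v and v -> u);
# the set form keeps one tuple per edge.
mate = {1: 2, 2: 1, 3: 5, 5: 3}
print(matching_dict_to_set(mate))  # e.g. {(1, 2), (3, 5)}

G = nx.Graph([(1, 2), (2, 3), (3, 5)])
print(nx.is_matching(G, mate))  # expected: True (the dict form is accepted as-is)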
144 |
+
|
145 |
+
|
146 |
+
@nx._dispatchable
|
147 |
+
def is_maximal_matching(G, matching):
|
148 |
+
"""Return True if ``matching`` is a maximal matching of ``G``
|
149 |
+
|
150 |
+
A *maximal matching* in a graph is a matching in which adding any
|
151 |
+
edge would cause the set to no longer be a valid matching.
|
152 |
+
|
153 |
+
Parameters
|
154 |
+
----------
|
155 |
+
G : NetworkX graph
|
156 |
+
|
157 |
+
matching : dict or set
|
158 |
+
A dictionary or set representing a matching. If a dictionary, it
|
159 |
+
must have ``matching[u] == v`` and ``matching[v] == u`` for each
|
160 |
+
edge ``(u, v)`` in the matching. If a set, it must have elements
|
161 |
+
of the form ``(u, v)``, where ``(u, v)`` is an edge in the
|
162 |
+
matching.
|
163 |
+
|
164 |
+
Returns
|
165 |
+
-------
|
166 |
+
bool
|
167 |
+
Whether the given set or dictionary represents a valid maximal
|
168 |
+
matching in the graph.
|
169 |
+
|
170 |
+
Examples
|
171 |
+
--------
|
172 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
|
173 |
+
>>> nx.is_maximal_matching(G, {(1, 2), (3, 4)})
|
174 |
+
True
|
175 |
+
|
176 |
+
"""
|
177 |
+
if isinstance(matching, dict):
|
178 |
+
matching = matching_dict_to_set(matching)
|
179 |
+
# If the given set is not a matching, then it is not a maximal matching.
|
180 |
+
edges = set()
|
181 |
+
nodes = set()
|
182 |
+
for edge in matching:
|
183 |
+
if len(edge) != 2:
|
184 |
+
raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
|
185 |
+
u, v = edge
|
186 |
+
if u not in G or v not in G:
|
187 |
+
raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
|
188 |
+
if u == v:
|
189 |
+
return False
|
190 |
+
if not G.has_edge(u, v):
|
191 |
+
return False
|
192 |
+
if u in nodes or v in nodes:
|
193 |
+
return False
|
194 |
+
nodes.update(edge)
|
195 |
+
edges.add(edge)
|
196 |
+
edges.add((v, u))
|
197 |
+
# A matching is maximal if adding any new edge from G to it
|
198 |
+
# causes the resulting set to match some node twice.
|
199 |
+
# Be careful to check for adding selfloops
|
200 |
+
for u, v in G.edges:
|
201 |
+
if (u, v) not in edges:
|
202 |
+
# could add edge (u, v) to edges and have a bigger matching
|
203 |
+
if u not in nodes and v not in nodes and u != v:
|
204 |
+
return False
|
205 |
+
return True
|
206 |
+
|
207 |
+
|
208 |
+
@nx._dispatchable
|
209 |
+
def is_perfect_matching(G, matching):
|
210 |
+
"""Return True if ``matching`` is a perfect matching for ``G``
|
211 |
+
|
212 |
+
A *perfect matching* in a graph is a matching in which exactly one edge
|
213 |
+
is incident upon each vertex.
|
214 |
+
|
215 |
+
Parameters
|
216 |
+
----------
|
217 |
+
G : NetworkX graph
|
218 |
+
|
219 |
+
matching : dict or set
|
220 |
+
A dictionary or set representing a matching. If a dictionary, it
|
221 |
+
must have ``matching[u] == v`` and ``matching[v] == u`` for each
|
222 |
+
edge ``(u, v)`` in the matching. If a set, it must have elements
|
223 |
+
of the form ``(u, v)``, where ``(u, v)`` is an edge in the
|
224 |
+
matching.
|
225 |
+
|
226 |
+
Returns
|
227 |
+
-------
|
228 |
+
bool
|
229 |
+
Whether the given set or dictionary represents a valid perfect
|
230 |
+
matching in the graph.
|
231 |
+
|
232 |
+
Examples
|
233 |
+
--------
|
234 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5), (4, 6)])
|
235 |
+
>>> my_match = {1: 2, 3: 5, 4: 6}
|
236 |
+
>>> nx.is_perfect_matching(G, my_match)
|
237 |
+
True
|
238 |
+
|
239 |
+
"""
|
240 |
+
if isinstance(matching, dict):
|
241 |
+
matching = matching_dict_to_set(matching)
|
242 |
+
|
243 |
+
nodes = set()
|
244 |
+
for edge in matching:
|
245 |
+
if len(edge) != 2:
|
246 |
+
raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
|
247 |
+
u, v = edge
|
248 |
+
if u not in G or v not in G:
|
249 |
+
raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
|
250 |
+
if u == v:
|
251 |
+
return False
|
252 |
+
if not G.has_edge(u, v):
|
253 |
+
return False
|
254 |
+
if u in nodes or v in nodes:
|
255 |
+
return False
|
256 |
+
nodes.update(edge)
|
257 |
+
return len(nodes) == len(G)
|
258 |
+
|
259 |
+
|
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def min_weight_matching(G, weight="weight"):
    """Computing a minimum-weight maximal matching of G.

    Use the maximum-weight algorithm with edge weights subtracted
    from the maximum weight of all edges.

    A matching is a subset of edges in which no node occurs more than once.
    The weight of a matching is the sum of the weights of its edges.
    A maximal matching cannot add more edges and still be a matching.
    The cardinality of a matching is the number of matched edges.

    This method replaces the edge weights with 1 plus the maximum edge weight
    minus the original edge weight.

    new_weight = (max_weight + 1) - edge_weight

    then runs :func:`max_weight_matching` with the new weights.
    The max weight matching with these new weights corresponds
    to the min weight matching using the original weights.
    Adding 1 to the max edge weight keeps all edge weights positive
    and as integers if they started as integers.

    You might worry that adding 1 to each weight would make the algorithm
    favor matchings with more edges. But we use the parameter
    `maxcardinality=True` in `max_weight_matching` to ensure that the
    number of edges in the competing matchings are the same and thus
    the optimum does not change due to changes in the number of edges.

    Read the documentation of `max_weight_matching` for more information.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    weight: string, optional (default='weight')
        Edge data key corresponding to the edge weight.
        If key not found, uses 1 as weight.

    Returns
    -------
    matching : set
        A minimal weight matching of the graph.

    See Also
    --------
    max_weight_matching
    """
    if len(G.edges) == 0:
        return max_weight_matching(G, maxcardinality=True, weight=weight)
    G_edges = G.edges(data=weight, default=1)
    max_weight = 1 + max(w for _, _, w in G_edges)
    InvG = nx.Graph()
    edges = ((u, v, max_weight - w) for u, v, w in G_edges)
    InvG.add_weighted_edges_from(edges, weight=weight)
    return max_weight_matching(InvG, maxcardinality=True, weight=weight)
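To make the `new_weight = (max_weight + 1) - edge_weight` transform concrete, here is a hedged worked example (illustrative only; the 4-cycle and its weights are my own choice):

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(1, 2, 2), (2, 3, 1), (3, 4, 2), (4, 1, 5)])

# Maximum-weight matching favours the heavy pairing {(2, 3), (1, 4)} with weight 6,
print(sorted(map(sorted, nx.max_weight_matching(G))))  # expected: [[1, 4], [2, 3]]
# while min_weight_matching, at the same cardinality, picks the cheap pairing with weight 4.
print(sorted(map(sorted, nx.min_weight_matching(G))))  # expected: [[1, 2], [3, 4]]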
319 |
+
|
320 |
+
|
321 |
+
@not_implemented_for("multigraph")
|
322 |
+
@not_implemented_for("directed")
|
323 |
+
@nx._dispatchable(edge_attrs="weight")
|
324 |
+
def max_weight_matching(G, maxcardinality=False, weight="weight"):
|
325 |
+
"""Compute a maximum-weighted matching of G.
|
326 |
+
|
327 |
+
A matching is a subset of edges in which no node occurs more than once.
|
328 |
+
The weight of a matching is the sum of the weights of its edges.
|
329 |
+
A maximal matching cannot add more edges and still be a matching.
|
330 |
+
The cardinality of a matching is the number of matched edges.
|
331 |
+
|
332 |
+
Parameters
|
333 |
+
----------
|
334 |
+
G : NetworkX graph
|
335 |
+
Undirected graph
|
336 |
+
|
337 |
+
maxcardinality: bool, optional (default=False)
|
338 |
+
If maxcardinality is True, compute the maximum-cardinality matching
|
339 |
+
with maximum weight among all maximum-cardinality matchings.
|
340 |
+
|
341 |
+
weight: string, optional (default='weight')
|
342 |
+
Edge data key corresponding to the edge weight.
|
343 |
+
If key not found, uses 1 as weight.
|
344 |
+
|
345 |
+
|
346 |
+
Returns
|
347 |
+
-------
|
348 |
+
matching : set
|
349 |
+
A maximal matching of the graph.
|
350 |
+
|
351 |
+
Examples
|
352 |
+
--------
|
353 |
+
>>> G = nx.Graph()
|
354 |
+
>>> edges = [(1, 2, 6), (1, 3, 2), (2, 3, 1), (2, 4, 7), (3, 5, 9), (4, 5, 3)]
|
355 |
+
>>> G.add_weighted_edges_from(edges)
|
356 |
+
>>> sorted(nx.max_weight_matching(G))
|
357 |
+
[(2, 4), (5, 3)]
|
358 |
+
|
359 |
+
Notes
|
360 |
+
-----
|
361 |
+
If G has edges with weight attributes the edge data are used as
|
362 |
+
weight values else the weights are assumed to be 1.
|
363 |
+
|
364 |
+
This function takes time O(number_of_nodes ** 3).
|
365 |
+
|
366 |
+
If all edge weights are integers, the algorithm uses only integer
|
367 |
+
computations. If floating point weights are used, the algorithm
|
368 |
+
could return a slightly suboptimal matching due to numeric
|
369 |
+
precision errors.
|
370 |
+
|
371 |
+
This method is based on the "blossom" method for finding augmenting
|
372 |
+
paths and the "primal-dual" method for finding a matching of maximum
|
373 |
+
weight, both methods invented by Jack Edmonds [1]_.
|
374 |
+
|
375 |
+
Bipartite graphs can also be matched using the functions present in
|
376 |
+
:mod:`networkx.algorithms.bipartite.matching`.
|
377 |
+
|
378 |
+
References
|
379 |
+
----------
|
380 |
+
.. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs",
|
381 |
+
Zvi Galil, ACM Computing Surveys, 1986.
|
382 |
+
"""
|
383 |
+
#
|
384 |
+
# The algorithm is taken from "Efficient Algorithms for Finding Maximum
|
385 |
+
# Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
|
386 |
+
# It is based on the "blossom" method for finding augmenting paths and
|
387 |
+
# the "primal-dual" method for finding a matching of maximum weight, both
|
388 |
+
# methods invented by Jack Edmonds.
|
389 |
+
#
|
390 |
+
# A C program for maximum weight matching by Ed Rothberg was used
|
391 |
+
# extensively to validate this new code.
|
392 |
+
#
|
393 |
+
# Many terms used in the code comments are explained in the paper
|
394 |
+
# by Galil. You will probably need the paper to make sense of this code.
|
395 |
+
#
|
396 |
+
|
397 |
+
class NoNode:
|
398 |
+
"""Dummy value which is different from any node."""
|
399 |
+
|
400 |
+
class Blossom:
|
401 |
+
"""Representation of a non-trivial blossom or sub-blossom."""
|
402 |
+
|
403 |
+
__slots__ = ["childs", "edges", "mybestedges"]
|
404 |
+
|
405 |
+
# b.childs is an ordered list of b's sub-blossoms, starting with
|
406 |
+
# the base and going round the blossom.
|
407 |
+
|
408 |
+
# b.edges is the list of b's connecting edges, such that
|
409 |
+
# b.edges[i] = (v, w) where v is a vertex in b.childs[i]
|
410 |
+
# and w is a vertex in b.childs[wrap(i+1)].
|
411 |
+
|
412 |
+
# If b is a top-level S-blossom,
|
413 |
+
# b.mybestedges is a list of least-slack edges to neighboring
|
414 |
+
# S-blossoms, or None if no such list has been computed yet.
|
415 |
+
# This is used for efficient computation of delta3.
|
416 |
+
|
417 |
+
# Generate the blossom's leaf vertices.
|
418 |
+
def leaves(self):
|
419 |
+
stack = [*self.childs]
|
420 |
+
while stack:
|
421 |
+
t = stack.pop()
|
422 |
+
if isinstance(t, Blossom):
|
423 |
+
stack.extend(t.childs)
|
424 |
+
else:
|
425 |
+
yield t
|
426 |
+
|
427 |
+
# Get a list of vertices.
|
428 |
+
gnodes = list(G)
|
429 |
+
if not gnodes:
|
430 |
+
return set() # don't bother with empty graphs
|
431 |
+
|
432 |
+
# Find the maximum edge weight.
|
433 |
+
maxweight = 0
|
434 |
+
allinteger = True
|
435 |
+
for i, j, d in G.edges(data=True):
|
436 |
+
wt = d.get(weight, 1)
|
437 |
+
if i != j and wt > maxweight:
|
438 |
+
maxweight = wt
|
439 |
+
allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long"))
|
440 |
+
|
441 |
+
# If v is a matched vertex, mate[v] is its partner vertex.
|
442 |
+
# If v is a single vertex, v does not occur as a key in mate.
|
443 |
+
# Initially all vertices are single; updated during augmentation.
|
444 |
+
mate = {}
|
445 |
+
|
446 |
+
# If b is a top-level blossom,
|
447 |
+
# label.get(b) is None if b is unlabeled (free),
|
448 |
+
# 1 if b is an S-blossom,
|
449 |
+
# 2 if b is a T-blossom.
|
450 |
+
# The label of a vertex is found by looking at the label of its top-level
|
451 |
+
# containing blossom.
|
452 |
+
# If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
|
453 |
+
# from an S-vertex outside the blossom.
|
454 |
+
# Labels are assigned during a stage and reset after each augmentation.
|
455 |
+
label = {}
|
456 |
+
|
457 |
+
# If b is a labeled top-level blossom,
|
458 |
+
# labeledge[b] = (v, w) is the edge through which b obtained its label
|
459 |
+
# such that w is a vertex in b, or None if b's base vertex is single.
|
460 |
+
# If w is a vertex inside a T-blossom and label[w] == 2,
|
461 |
+
# labeledge[w] = (v, w) is an edge through which w is reachable from
|
462 |
+
# outside the blossom.
|
463 |
+
labeledge = {}
|
464 |
+
|
465 |
+
# If v is a vertex, inblossom[v] is the top-level blossom to which v
|
466 |
+
# belongs.
|
467 |
+
# If v is a top-level vertex, inblossom[v] == v since v is itself
|
468 |
+
# a (trivial) top-level blossom.
|
469 |
+
# Initially all vertices are top-level trivial blossoms.
|
470 |
+
inblossom = dict(zip(gnodes, gnodes))
|
471 |
+
|
472 |
+
# If b is a sub-blossom,
|
473 |
+
# blossomparent[b] is its immediate parent (sub-)blossom.
|
474 |
+
# If b is a top-level blossom, blossomparent[b] is None.
|
475 |
+
blossomparent = dict(zip(gnodes, repeat(None)))
|
476 |
+
|
477 |
+
# If b is a (sub-)blossom,
|
478 |
+
# blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
|
479 |
+
blossombase = dict(zip(gnodes, gnodes))
|
480 |
+
|
481 |
+
# If w is a free vertex (or an unreached vertex inside a T-blossom),
|
482 |
+
# bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
|
483 |
+
# or None if there is no such edge.
|
484 |
+
# If b is a (possibly trivial) top-level S-blossom,
|
485 |
+
# bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
|
486 |
+
# (v inside b), or None if there is no such edge.
|
487 |
+
# This is used for efficient computation of delta2 and delta3.
|
488 |
+
bestedge = {}
|
489 |
+
|
490 |
+
# If v is a vertex,
|
491 |
+
# dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
|
492 |
+
# optimization problem (if all edge weights are integers, multiplication
|
493 |
+
# by two ensures that all values remain integers throughout the algorithm).
|
494 |
+
# Initially, u(v) = maxweight / 2.
|
495 |
+
dualvar = dict(zip(gnodes, repeat(maxweight)))
|
496 |
+
|
497 |
+
# If b is a non-trivial blossom,
|
498 |
+
# blossomdual[b] = z(b) where z(b) is b's variable in the dual
|
499 |
+
# optimization problem.
|
500 |
+
blossomdual = {}
|
501 |
+
|
502 |
+
# If (v, w) in allowedge or (w, v) in allowedg, then the edge
|
503 |
+
# (v, w) is known to have zero slack in the optimization problem;
|
504 |
+
# otherwise the edge may or may not have zero slack.
|
505 |
+
allowedge = {}
|
506 |
+
|
507 |
+
# Queue of newly discovered S-vertices.
|
508 |
+
queue = []
|
509 |
+
|
510 |
+
# Return 2 * slack of edge (v, w) (does not work inside blossoms).
|
511 |
+
def slack(v, w):
|
512 |
+
return dualvar[v] + dualvar[w] - 2 * G[v][w].get(weight, 1)
|
513 |
+
|
514 |
+
# Assign label t to the top-level blossom containing vertex w,
|
515 |
+
# coming through an edge from vertex v.
|
516 |
+
def assignLabel(w, t, v):
|
517 |
+
b = inblossom[w]
|
518 |
+
assert label.get(w) is None and label.get(b) is None
|
519 |
+
label[w] = label[b] = t
|
520 |
+
if v is not None:
|
521 |
+
labeledge[w] = labeledge[b] = (v, w)
|
522 |
+
else:
|
523 |
+
labeledge[w] = labeledge[b] = None
|
524 |
+
bestedge[w] = bestedge[b] = None
|
525 |
+
if t == 1:
|
526 |
+
# b became an S-vertex/blossom; add it(s vertices) to the queue.
|
527 |
+
if isinstance(b, Blossom):
|
528 |
+
queue.extend(b.leaves())
|
529 |
+
else:
|
530 |
+
queue.append(b)
|
531 |
+
elif t == 2:
|
532 |
+
# b became a T-vertex/blossom; assign label S to its mate.
|
533 |
+
# (If b is a non-trivial blossom, its base is the only vertex
|
534 |
+
# with an external mate.)
|
535 |
+
base = blossombase[b]
|
536 |
+
assignLabel(mate[base], 1, base)
|
537 |
+
|
538 |
+
# Trace back from vertices v and w to discover either a new blossom
|
539 |
+
# or an augmenting path. Return the base vertex of the new blossom,
|
540 |
+
# or NoNode if an augmenting path was found.
|
541 |
+
def scanBlossom(v, w):
|
542 |
+
# Trace back from v and w, placing breadcrumbs as we go.
|
543 |
+
path = []
|
544 |
+
base = NoNode
|
545 |
+
while v is not NoNode:
|
546 |
+
# Look for a breadcrumb in v's blossom or put a new breadcrumb.
|
547 |
+
b = inblossom[v]
|
548 |
+
if label[b] & 4:
|
549 |
+
base = blossombase[b]
|
550 |
+
break
|
551 |
+
assert label[b] == 1
|
552 |
+
path.append(b)
|
553 |
+
label[b] = 5
|
554 |
+
# Trace one step back.
|
555 |
+
if labeledge[b] is None:
|
556 |
+
# The base of blossom b is single; stop tracing this path.
|
557 |
+
assert blossombase[b] not in mate
|
558 |
+
v = NoNode
|
559 |
+
else:
|
560 |
+
assert labeledge[b][0] == mate[blossombase[b]]
|
561 |
+
v = labeledge[b][0]
|
562 |
+
b = inblossom[v]
|
563 |
+
assert label[b] == 2
|
564 |
+
# b is a T-blossom; trace one more step back.
|
565 |
+
v = labeledge[b][0]
|
566 |
+
# Swap v and w so that we alternate between both paths.
|
567 |
+
if w is not NoNode:
|
568 |
+
v, w = w, v
|
569 |
+
# Remove breadcrumbs.
|
570 |
+
for b in path:
|
571 |
+
label[b] = 1
|
572 |
+
# Return base vertex, if we found one.
|
573 |
+
return base
|
574 |
+
|
575 |
+
# Construct a new blossom with given base, through S-vertices v and w.
|
576 |
+
# Label the new blossom as S; set its dual variable to zero;
|
577 |
+
# relabel its T-vertices to S and add them to the queue.
|
578 |
+
def addBlossom(base, v, w):
|
579 |
+
bb = inblossom[base]
|
580 |
+
bv = inblossom[v]
|
581 |
+
bw = inblossom[w]
|
582 |
+
# Create blossom.
|
583 |
+
b = Blossom()
|
584 |
+
blossombase[b] = base
|
585 |
+
blossomparent[b] = None
|
586 |
+
blossomparent[bb] = b
|
587 |
+
# Make list of sub-blossoms and their interconnecting edge endpoints.
|
588 |
+
b.childs = path = []
|
589 |
+
b.edges = edgs = [(v, w)]
|
590 |
+
# Trace back from v to base.
|
591 |
+
while bv != bb:
|
592 |
+
# Add bv to the new blossom.
|
593 |
+
blossomparent[bv] = b
|
594 |
+
path.append(bv)
|
595 |
+
edgs.append(labeledge[bv])
|
596 |
+
assert label[bv] == 2 or (
|
597 |
+
label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]]
|
598 |
+
)
|
599 |
+
# Trace one step back.
|
600 |
+
v = labeledge[bv][0]
|
601 |
+
bv = inblossom[v]
|
602 |
+
# Add base sub-blossom; reverse lists.
|
603 |
+
path.append(bb)
|
604 |
+
path.reverse()
|
605 |
+
edgs.reverse()
|
606 |
+
# Trace back from w to base.
|
607 |
+
while bw != bb:
|
608 |
+
# Add bw to the new blossom.
|
609 |
+
blossomparent[bw] = b
|
610 |
+
path.append(bw)
|
611 |
+
edgs.append((labeledge[bw][1], labeledge[bw][0]))
|
612 |
+
assert label[bw] == 2 or (
|
613 |
+
label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]]
|
614 |
+
)
|
615 |
+
# Trace one step back.
|
616 |
+
w = labeledge[bw][0]
|
617 |
+
bw = inblossom[w]
|
618 |
+
# Set label to S.
|
619 |
+
assert label[bb] == 1
|
620 |
+
label[b] = 1
|
621 |
+
labeledge[b] = labeledge[bb]
|
622 |
+
# Set dual variable to zero.
|
623 |
+
blossomdual[b] = 0
|
624 |
+
# Relabel vertices.
|
625 |
+
for v in b.leaves():
|
626 |
+
if label[inblossom[v]] == 2:
|
627 |
+
# This T-vertex now turns into an S-vertex because it becomes
|
628 |
+
# part of an S-blossom; add it to the queue.
|
629 |
+
queue.append(v)
|
630 |
+
inblossom[v] = b
|
631 |
+
# Compute b.mybestedges.
|
632 |
+
bestedgeto = {}
|
633 |
+
for bv in path:
|
634 |
+
if isinstance(bv, Blossom):
|
635 |
+
if bv.mybestedges is not None:
|
636 |
+
# Walk this subblossom's least-slack edges.
|
637 |
+
nblist = bv.mybestedges
|
638 |
+
# The sub-blossom won't need this data again.
|
639 |
+
bv.mybestedges = None
|
640 |
+
else:
|
641 |
+
# This subblossom does not have a list of least-slack
|
642 |
+
# edges; get the information from the vertices.
|
643 |
+
nblist = [
|
644 |
+
(v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w
|
645 |
+
]
|
646 |
+
else:
|
647 |
+
nblist = [(bv, w) for w in G.neighbors(bv) if bv != w]
|
648 |
+
for k in nblist:
|
649 |
+
(i, j) = k
|
650 |
+
if inblossom[j] == b:
|
651 |
+
i, j = j, i
|
652 |
+
bj = inblossom[j]
|
653 |
+
if (
|
654 |
+
bj != b
|
655 |
+
and label.get(bj) == 1
|
656 |
+
and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj]))
|
657 |
+
):
|
658 |
+
bestedgeto[bj] = k
|
659 |
+
# Forget about least-slack edge of the subblossom.
|
660 |
+
bestedge[bv] = None
|
661 |
+
b.mybestedges = list(bestedgeto.values())
|
662 |
+
# Select bestedge[b].
|
663 |
+
mybestedge = None
|
664 |
+
bestedge[b] = None
|
665 |
+
for k in b.mybestedges:
|
666 |
+
kslack = slack(*k)
|
667 |
+
if mybestedge is None or kslack < mybestslack:
|
668 |
+
mybestedge = k
|
669 |
+
mybestslack = kslack
|
670 |
+
bestedge[b] = mybestedge
|
671 |
+
|
672 |
+
# Expand the given top-level blossom.
|
673 |
+
def expandBlossom(b, endstage):
|
674 |
+
# This is an obnoxiously complicated recursive function for the sake of
|
675 |
+
# a stack-transformation. So, we hack around the complexity by using
|
676 |
+
# a trampoline pattern. By yielding the arguments to each recursive
|
677 |
+
# call, we keep the actual callstack flat.
|
678 |
+
|
679 |
+
def _recurse(b, endstage):
|
680 |
+
# Convert sub-blossoms into top-level blossoms.
|
681 |
+
for s in b.childs:
|
682 |
+
blossomparent[s] = None
|
683 |
+
if isinstance(s, Blossom):
|
684 |
+
if endstage and blossomdual[s] == 0:
|
685 |
+
# Recursively expand this sub-blossom.
|
686 |
+
yield s
|
687 |
+
else:
|
688 |
+
for v in s.leaves():
|
689 |
+
inblossom[v] = s
|
690 |
+
else:
|
691 |
+
inblossom[s] = s
|
692 |
+
# If we expand a T-blossom during a stage, its sub-blossoms must be
|
693 |
+
# relabeled.
|
694 |
+
if (not endstage) and label.get(b) == 2:
|
695 |
+
# Start at the sub-blossom through which the expanding
|
696 |
+
# blossom obtained its label, and relabel sub-blossoms untili
|
697 |
+
# we reach the base.
|
698 |
+
# Figure out through which sub-blossom the expanding blossom
|
699 |
+
# obtained its label initially.
|
700 |
+
entrychild = inblossom[labeledge[b][1]]
|
701 |
+
# Decide in which direction we will go round the blossom.
|
702 |
+
j = b.childs.index(entrychild)
|
703 |
+
if j & 1:
|
704 |
+
# Start index is odd; go forward and wrap.
|
705 |
+
j -= len(b.childs)
|
706 |
+
jstep = 1
|
707 |
+
else:
|
708 |
+
# Start index is even; go backward.
|
709 |
+
jstep = -1
|
710 |
+
# Move along the blossom until we get to the base.
|
711 |
+
v, w = labeledge[b]
|
712 |
+
while j != 0:
|
713 |
+
# Relabel the T-sub-blossom.
|
714 |
+
if jstep == 1:
|
715 |
+
p, q = b.edges[j]
|
716 |
+
else:
|
717 |
+
q, p = b.edges[j - 1]
|
718 |
+
label[w] = None
|
719 |
+
label[q] = None
|
720 |
+
assignLabel(w, 2, v)
|
721 |
+
# Step to the next S-sub-blossom and note its forward edge.
|
722 |
+
allowedge[(p, q)] = allowedge[(q, p)] = True
|
723 |
+
j += jstep
|
724 |
+
if jstep == 1:
|
725 |
+
v, w = b.edges[j]
|
726 |
+
else:
|
727 |
+
w, v = b.edges[j - 1]
|
728 |
+
# Step to the next T-sub-blossom.
|
729 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
730 |
+
j += jstep
|
731 |
+
# Relabel the base T-sub-blossom WITHOUT stepping through to
|
732 |
+
# its mate (so don't call assignLabel).
|
733 |
+
bw = b.childs[j]
|
734 |
+
label[w] = label[bw] = 2
|
735 |
+
labeledge[w] = labeledge[bw] = (v, w)
|
736 |
+
bestedge[bw] = None
|
737 |
+
# Continue along the blossom until we get back to entrychild.
|
738 |
+
j += jstep
|
739 |
+
while b.childs[j] != entrychild:
|
740 |
+
# Examine the vertices of the sub-blossom to see whether
|
741 |
+
# it is reachable from a neighboring S-vertex outside the
|
742 |
+
# expanding blossom.
|
743 |
+
bv = b.childs[j]
|
744 |
+
if label.get(bv) == 1:
|
745 |
+
# This sub-blossom just got label S through one of its
|
746 |
+
# neighbors; leave it be.
|
747 |
+
j += jstep
|
748 |
+
continue
|
749 |
+
if isinstance(bv, Blossom):
|
750 |
+
for v in bv.leaves():
|
751 |
+
if label.get(v):
|
752 |
+
break
|
753 |
+
else:
|
754 |
+
v = bv
|
755 |
+
# If the sub-blossom contains a reachable vertex, assign
|
756 |
+
# label T to the sub-blossom.
|
757 |
+
if label.get(v):
|
758 |
+
assert label[v] == 2
|
759 |
+
assert inblossom[v] == bv
|
760 |
+
label[v] = None
|
761 |
+
label[mate[blossombase[bv]]] = None
|
762 |
+
assignLabel(v, 2, labeledge[v][0])
|
763 |
+
j += jstep
|
764 |
+
# Remove the expanded blossom entirely.
|
765 |
+
label.pop(b, None)
|
766 |
+
labeledge.pop(b, None)
|
767 |
+
bestedge.pop(b, None)
|
768 |
+
del blossomparent[b]
|
769 |
+
del blossombase[b]
|
770 |
+
del blossomdual[b]
|
771 |
+
|
772 |
+
# Now, we apply the trampoline pattern. We simulate a recursive
|
773 |
+
# callstack by maintaining a stack of generators, each yielding a
|
774 |
+
# sequence of function arguments. We grow the stack by appending a call
|
775 |
+
# to _recurse on each argument tuple, and shrink the stack whenever a
|
776 |
+
# generator is exhausted.
|
777 |
+
stack = [_recurse(b, endstage)]
|
778 |
+
while stack:
|
779 |
+
top = stack[-1]
|
780 |
+
for s in top:
|
781 |
+
stack.append(_recurse(s, endstage))
|
782 |
+
break
|
783 |
+
else:
|
784 |
+
stack.pop()
|
785 |
+
|
786 |
+
# Swap matched/unmatched edges over an alternating path through blossom b
|
787 |
+
# between vertex v and the base vertex. Keep blossom bookkeeping
|
788 |
+
# consistent.
|
789 |
+
def augmentBlossom(b, v):
|
790 |
+
# This is an obnoxiously complicated recursive function for the sake of
|
791 |
+
# a stack-transformation. So, we hack around the complexity by using
|
792 |
+
# a trampoline pattern. By yielding the arguments to each recursive
|
793 |
+
# call, we keep the actual callstack flat.
|
794 |
+
|
795 |
+
def _recurse(b, v):
|
796 |
+
# Bubble up through the blossom tree from vertex v to an immediate
|
797 |
+
# sub-blossom of b.
|
798 |
+
t = v
|
799 |
+
while blossomparent[t] != b:
|
800 |
+
t = blossomparent[t]
|
801 |
+
# Recursively deal with the first sub-blossom.
|
802 |
+
if isinstance(t, Blossom):
|
803 |
+
yield (t, v)
|
804 |
+
# Decide in which direction we will go round the blossom.
|
805 |
+
i = j = b.childs.index(t)
|
806 |
+
if i & 1:
|
807 |
+
# Start index is odd; go forward and wrap.
|
808 |
+
j -= len(b.childs)
|
809 |
+
jstep = 1
|
810 |
+
else:
|
811 |
+
# Start index is even; go backward.
|
812 |
+
jstep = -1
|
813 |
+
# Move along the blossom until we get to the base.
|
814 |
+
while j != 0:
|
815 |
+
# Step to the next sub-blossom and augment it recursively.
|
816 |
+
j += jstep
|
817 |
+
t = b.childs[j]
|
818 |
+
if jstep == 1:
|
819 |
+
w, x = b.edges[j]
|
820 |
+
else:
|
821 |
+
x, w = b.edges[j - 1]
|
822 |
+
if isinstance(t, Blossom):
|
823 |
+
yield (t, w)
|
824 |
+
# Step to the next sub-blossom and augment it recursively.
|
825 |
+
j += jstep
|
826 |
+
t = b.childs[j]
|
827 |
+
if isinstance(t, Blossom):
|
828 |
+
yield (t, x)
|
829 |
+
# Match the edge connecting those sub-blossoms.
|
830 |
+
mate[w] = x
|
831 |
+
mate[x] = w
|
832 |
+
# Rotate the list of sub-blossoms to put the new base at the front.
|
833 |
+
b.childs = b.childs[i:] + b.childs[:i]
|
834 |
+
b.edges = b.edges[i:] + b.edges[:i]
|
835 |
+
blossombase[b] = blossombase[b.childs[0]]
|
836 |
+
assert blossombase[b] == v
|
837 |
+
|
838 |
+
# Now, we apply the trampoline pattern. We simulate a recursive
|
839 |
+
# callstack by maintaining a stack of generators, each yielding a
|
840 |
+
# sequence of function arguments. We grow the stack by appending a call
|
841 |
+
# to _recurse on each argument tuple, and shrink the stack whenever a
|
842 |
+
# generator is exhausted.
|
843 |
+
stack = [_recurse(b, v)]
|
844 |
+
while stack:
|
845 |
+
top = stack[-1]
|
846 |
+
for args in top:
|
847 |
+
stack.append(_recurse(*args))
|
848 |
+
break
|
849 |
+
else:
|
850 |
+
stack.pop()
|
851 |
+
|
852 |
+
# Swap matched/unmatched edges over an alternating path between two
|
853 |
+
# single vertices. The augmenting path runs through S-vertices v and w.
|
854 |
+
def augmentMatching(v, w):
|
855 |
+
for s, j in ((v, w), (w, v)):
|
856 |
+
# Match vertex s to vertex j. Then trace back from s
|
857 |
+
# until we find a single vertex, swapping matched and unmatched
|
858 |
+
# edges as we go.
|
859 |
+
while 1:
|
860 |
+
bs = inblossom[s]
|
861 |
+
assert label[bs] == 1
|
862 |
+
assert (labeledge[bs] is None and blossombase[bs] not in mate) or (
|
863 |
+
labeledge[bs][0] == mate[blossombase[bs]]
|
864 |
+
)
|
865 |
+
# Augment through the S-blossom from s to base.
|
866 |
+
if isinstance(bs, Blossom):
|
867 |
+
augmentBlossom(bs, s)
|
868 |
+
# Update mate[s]
|
869 |
+
mate[s] = j
|
870 |
+
# Trace one step back.
|
871 |
+
if labeledge[bs] is None:
|
872 |
+
# Reached single vertex; stop.
|
873 |
+
break
|
874 |
+
t = labeledge[bs][0]
|
875 |
+
bt = inblossom[t]
|
876 |
+
assert label[bt] == 2
|
877 |
+
# Trace one more step back.
|
878 |
+
s, j = labeledge[bt]
|
879 |
+
# Augment through the T-blossom from j to base.
|
880 |
+
assert blossombase[bt] == t
|
881 |
+
if isinstance(bt, Blossom):
|
882 |
+
augmentBlossom(bt, j)
|
883 |
+
# Update mate[j]
|
884 |
+
mate[j] = s
|
885 |
+
|
886 |
+
# Verify that the optimum solution has been reached.
|
887 |
+
def verifyOptimum():
|
888 |
+
if maxcardinality:
|
889 |
+
# Vertices may have negative dual;
|
890 |
+
# find a constant non-negative number to add to all vertex duals.
|
891 |
+
vdualoffset = max(0, -min(dualvar.values()))
|
892 |
+
else:
|
893 |
+
vdualoffset = 0
|
894 |
+
# 0. all dual variables are non-negative
|
895 |
+
assert min(dualvar.values()) + vdualoffset >= 0
|
896 |
+
assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0
|
897 |
+
# 0. all edges have non-negative slack and
|
898 |
+
# 1. all matched edges have zero slack;
|
899 |
+
for i, j, d in G.edges(data=True):
|
900 |
+
wt = d.get(weight, 1)
|
901 |
+
if i == j:
|
902 |
+
continue # ignore self-loops
|
903 |
+
s = dualvar[i] + dualvar[j] - 2 * wt
|
904 |
+
iblossoms = [i]
|
905 |
+
jblossoms = [j]
|
906 |
+
while blossomparent[iblossoms[-1]] is not None:
|
907 |
+
iblossoms.append(blossomparent[iblossoms[-1]])
|
908 |
+
while blossomparent[jblossoms[-1]] is not None:
|
909 |
+
jblossoms.append(blossomparent[jblossoms[-1]])
|
910 |
+
iblossoms.reverse()
|
911 |
+
jblossoms.reverse()
|
912 |
+
for bi, bj in zip(iblossoms, jblossoms):
|
913 |
+
if bi != bj:
|
914 |
+
break
|
915 |
+
s += 2 * blossomdual[bi]
|
916 |
+
assert s >= 0
|
917 |
+
if mate.get(i) == j or mate.get(j) == i:
|
918 |
+
assert mate[i] == j and mate[j] == i
|
919 |
+
assert s == 0
|
920 |
+
# 2. all single vertices have zero dual value;
|
921 |
+
for v in gnodes:
|
922 |
+
assert (v in mate) or dualvar[v] + vdualoffset == 0
|
923 |
+
# 3. all blossoms with positive dual value are full.
|
924 |
+
for b in blossomdual:
|
925 |
+
if blossomdual[b] > 0:
|
926 |
+
assert len(b.edges) % 2 == 1
|
927 |
+
for i, j in b.edges[1::2]:
|
928 |
+
assert mate[i] == j and mate[j] == i
|
929 |
+
# Ok.
|
930 |
+
|
931 |
+
# Main loop: continue until no further improvement is possible.
|
932 |
+
while 1:
|
933 |
+
# Each iteration of this loop is a "stage".
|
934 |
+
# A stage finds an augmenting path and uses that to improve
|
935 |
+
# the matching.
|
936 |
+
|
937 |
+
# Remove labels from top-level blossoms/vertices.
|
938 |
+
label.clear()
|
939 |
+
labeledge.clear()
|
940 |
+
|
941 |
+
# Forget all about least-slack edges.
|
942 |
+
bestedge.clear()
|
943 |
+
for b in blossomdual:
|
944 |
+
b.mybestedges = None
|
945 |
+
|
946 |
+
# Loss of labeling means that we can not be sure that currently
|
947 |
+
# allowable edges remain allowable throughout this stage.
|
948 |
+
allowedge.clear()
|
949 |
+
|
950 |
+
# Make queue empty.
|
951 |
+
queue[:] = []
|
952 |
+
|
953 |
+
# Label single blossoms/vertices with S and put them in the queue.
|
954 |
+
for v in gnodes:
|
955 |
+
if (v not in mate) and label.get(inblossom[v]) is None:
|
956 |
+
assignLabel(v, 1, None)
|
957 |
+
|
958 |
+
# Loop until we succeed in augmenting the matching.
|
959 |
+
augmented = 0
|
960 |
+
while 1:
|
961 |
+
# Each iteration of this loop is a "substage".
|
962 |
+
# A substage tries to find an augmenting path;
|
963 |
+
# if found, the path is used to improve the matching and
|
964 |
+
# the stage ends. If there is no augmenting path, the
|
965 |
+
# primal-dual method is used to pump some slack out of
|
966 |
+
# the dual variables.
|
967 |
+
|
968 |
+
# Continue labeling until all vertices which are reachable
|
969 |
+
# through an alternating path have got a label.
|
970 |
+
while queue and not augmented:
|
971 |
+
# Take an S vertex from the queue.
|
972 |
+
v = queue.pop()
|
973 |
+
assert label[inblossom[v]] == 1
|
974 |
+
|
975 |
+
# Scan its neighbors:
|
976 |
+
for w in G.neighbors(v):
|
977 |
+
if w == v:
|
978 |
+
continue # ignore self-loops
|
979 |
+
# w is a neighbor to v
|
980 |
+
bv = inblossom[v]
|
981 |
+
bw = inblossom[w]
|
982 |
+
if bv == bw:
|
983 |
+
# this edge is internal to a blossom; ignore it
|
984 |
+
continue
|
985 |
+
if (v, w) not in allowedge:
|
986 |
+
kslack = slack(v, w)
|
987 |
+
if kslack <= 0:
|
988 |
+
# edge k has zero slack => it is allowable
|
989 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
990 |
+
if (v, w) in allowedge:
|
991 |
+
if label.get(bw) is None:
|
992 |
+
# (C1) w is a free vertex;
|
993 |
+
# label w with T and label its mate with S (R12).
|
994 |
+
assignLabel(w, 2, v)
|
995 |
+
elif label.get(bw) == 1:
|
996 |
+
# (C2) w is an S-vertex (not in the same blossom);
|
997 |
+
# follow back-links to discover either an
|
998 |
+
# augmenting path or a new blossom.
|
999 |
+
base = scanBlossom(v, w)
|
1000 |
+
if base is not NoNode:
|
1001 |
+
# Found a new blossom; add it to the blossom
|
1002 |
+
# bookkeeping and turn it into an S-blossom.
|
1003 |
+
addBlossom(base, v, w)
|
1004 |
+
else:
|
1005 |
+
# Found an augmenting path; augment the
|
1006 |
+
# matching and end this stage.
|
1007 |
+
augmentMatching(v, w)
|
1008 |
+
augmented = 1
|
1009 |
+
break
|
1010 |
+
elif label.get(w) is None:
|
1011 |
+
# w is inside a T-blossom, but w itself has not
|
1012 |
+
# yet been reached from outside the blossom;
|
1013 |
+
# mark it as reached (we need this to relabel
|
1014 |
+
# during T-blossom expansion).
|
1015 |
+
assert label[bw] == 2
|
1016 |
+
label[w] = 2
|
1017 |
+
labeledge[w] = (v, w)
|
1018 |
+
elif label.get(bw) == 1:
|
1019 |
+
# keep track of the least-slack non-allowable edge to
|
1020 |
+
# a different S-blossom.
|
1021 |
+
if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]):
|
1022 |
+
bestedge[bv] = (v, w)
|
1023 |
+
elif label.get(w) is None:
|
1024 |
+
# w is a free vertex (or an unreached vertex inside
|
1025 |
+
# a T-blossom) but we can not reach it yet;
|
1026 |
+
# keep track of the least-slack edge that reaches w.
|
1027 |
+
if bestedge.get(w) is None or kslack < slack(*bestedge[w]):
|
1028 |
+
bestedge[w] = (v, w)
|
1029 |
+
|
1030 |
+
if augmented:
|
1031 |
+
break
|
1032 |
+
|
1033 |
+
# There is no augmenting path under these constraints;
|
1034 |
+
# compute delta and reduce slack in the optimization problem.
|
1035 |
+
# (Note that our vertex dual variables, edge slacks and delta's
|
1036 |
+
# are pre-multiplied by two.)
|
1037 |
+
deltatype = -1
|
1038 |
+
delta = deltaedge = deltablossom = None
|
1039 |
+
|
1040 |
+
# Compute delta1: the minimum value of any vertex dual.
|
1041 |
+
if not maxcardinality:
|
1042 |
+
deltatype = 1
|
1043 |
+
delta = min(dualvar.values())
|
1044 |
+
|
1045 |
+
# Compute delta2: the minimum slack on any edge between
|
1046 |
+
# an S-vertex and a free vertex.
|
1047 |
+
for v in G.nodes():
|
1048 |
+
if label.get(inblossom[v]) is None and bestedge.get(v) is not None:
|
1049 |
+
d = slack(*bestedge[v])
|
1050 |
+
if deltatype == -1 or d < delta:
|
1051 |
+
delta = d
|
1052 |
+
deltatype = 2
|
1053 |
+
deltaedge = bestedge[v]
|
1054 |
+
|
1055 |
+
# Compute delta3: half the minimum slack on any edge between
|
1056 |
+
# a pair of S-blossoms.
|
1057 |
+
for b in blossomparent:
|
1058 |
+
if (
|
1059 |
+
blossomparent[b] is None
|
1060 |
+
and label.get(b) == 1
|
1061 |
+
and bestedge.get(b) is not None
|
1062 |
+
):
|
1063 |
+
kslack = slack(*bestedge[b])
|
1064 |
+
if allinteger:
|
1065 |
+
assert (kslack % 2) == 0
|
1066 |
+
d = kslack // 2
|
1067 |
+
else:
|
1068 |
+
d = kslack / 2.0
|
1069 |
+
if deltatype == -1 or d < delta:
|
1070 |
+
delta = d
|
1071 |
+
deltatype = 3
|
1072 |
+
deltaedge = bestedge[b]
|
1073 |
+
|
1074 |
+
# Compute delta4: minimum z variable of any T-blossom.
|
1075 |
+
for b in blossomdual:
|
1076 |
+
if (
|
1077 |
+
blossomparent[b] is None
|
1078 |
+
and label.get(b) == 2
|
1079 |
+
and (deltatype == -1 or blossomdual[b] < delta)
|
1080 |
+
):
|
1081 |
+
delta = blossomdual[b]
|
1082 |
+
deltatype = 4
|
1083 |
+
deltablossom = b
|
1084 |
+
|
1085 |
+
if deltatype == -1:
|
1086 |
+
# No further improvement possible; max-cardinality optimum
|
1087 |
+
# reached. Do a final delta update to make the optimum
|
1088 |
+
# verifiable.
|
1089 |
+
assert maxcardinality
|
1090 |
+
deltatype = 1
|
1091 |
+
delta = max(0, min(dualvar.values()))
|
1092 |
+
|
1093 |
+
# Update dual variables according to delta.
|
1094 |
+
for v in gnodes:
|
1095 |
+
if label.get(inblossom[v]) == 1:
|
1096 |
+
# S-vertex: 2*u = 2*u - 2*delta
|
1097 |
+
dualvar[v] -= delta
|
1098 |
+
elif label.get(inblossom[v]) == 2:
|
1099 |
+
# T-vertex: 2*u = 2*u + 2*delta
|
1100 |
+
dualvar[v] += delta
|
1101 |
+
for b in blossomdual:
|
1102 |
+
if blossomparent[b] is None:
|
1103 |
+
if label.get(b) == 1:
|
1104 |
+
# top-level S-blossom: z = z + 2*delta
|
1105 |
+
blossomdual[b] += delta
|
1106 |
+
elif label.get(b) == 2:
|
1107 |
+
# top-level T-blossom: z = z - 2*delta
|
1108 |
+
blossomdual[b] -= delta
|
1109 |
+
|
1110 |
+
# Take action at the point where minimum delta occurred.
|
1111 |
+
if deltatype == 1:
|
1112 |
+
# No further improvement possible; optimum reached.
|
1113 |
+
break
|
1114 |
+
elif deltatype == 2:
|
1115 |
+
# Use the least-slack edge to continue the search.
|
1116 |
+
(v, w) = deltaedge
|
1117 |
+
assert label[inblossom[v]] == 1
|
1118 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
1119 |
+
queue.append(v)
|
1120 |
+
elif deltatype == 3:
|
1121 |
+
# Use the least-slack edge to continue the search.
|
1122 |
+
(v, w) = deltaedge
|
1123 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
1124 |
+
assert label[inblossom[v]] == 1
|
1125 |
+
queue.append(v)
|
1126 |
+
elif deltatype == 4:
|
1127 |
+
# Expand the least-z blossom.
|
1128 |
+
expandBlossom(deltablossom, False)
|
1129 |
+
|
1130 |
+
# End of a this substage.
|
1131 |
+
|
1132 |
+
# Paranoia check that the matching is symmetric.
|
1133 |
+
for v in mate:
|
1134 |
+
assert mate[mate[v]] == v
|
1135 |
+
|
1136 |
+
# Stop when no more augmenting path can be found.
|
1137 |
+
if not augmented:
|
1138 |
+
break
|
1139 |
+
|
1140 |
+
# End of a stage; expand all S-blossoms which have zero dual.
|
1141 |
+
for b in list(blossomdual.keys()):
|
1142 |
+
if b not in blossomdual:
|
1143 |
+
continue # already expanded
|
1144 |
+
if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0:
|
1145 |
+
expandBlossom(b, True)
|
1146 |
+
|
1147 |
+
# Verify that we reached the optimum solution (only for integer weights).
|
1148 |
+
if allinteger:
|
1149 |
+
verifyOptimum()
|
1150 |
+
|
1151 |
+
return matching_dict_to_set(mate)
|
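The `maxcardinality` flag of `max_weight_matching` is documented above but not demonstrated; the following sketch (an editorial illustration, graph chosen by me) shows a case where it changes the result:

import networkx as nx

# A weighted path: one heavy middle edge, two light outer edges.
G = nx.Graph()
G.add_weighted_edges_from([(1, 2, 1), (2, 3, 5), (3, 4, 1)])

# Pure maximum weight: the single heavy edge wins.
print(sorted(map(sorted, nx.max_weight_matching(G))))  # expected: [[2, 3]]
# Maximum cardinality first, then maximum weight among those matchings.
print(sorted(map(sorted, nx.max_weight_matching(G, maxcardinality=True))))  # expected: [[1, 2], [3, 4]]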
venv/lib/python3.10/site-packages/networkx/algorithms/node_classification.py
ADDED
@@ -0,0 +1,218 @@
""" This module provides the functions for node classification problem.

The functions in this module are not imported
into the top level `networkx` namespace.
You can access these functions by importing
the `networkx.algorithms.node_classification` modules,
then accessing the functions as attributes of `node_classification`.
For example:

  >>> from networkx.algorithms import node_classification
  >>> G = nx.path_graph(4)
  >>> G.edges()
  EdgeView([(0, 1), (1, 2), (2, 3)])
  >>> G.nodes[0]["label"] = "A"
  >>> G.nodes[3]["label"] = "B"
  >>> node_classification.harmonic_function(G)
  ['A', 'A', 'B', 'B']

References
----------
Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
Semi-supervised learning using gaussian fields and harmonic functions.
In ICML (Vol. 3, pp. 912-919).
"""
import networkx as nx

__all__ = ["harmonic_function", "local_and_global_consistency"]


@nx.utils.not_implemented_for("directed")
@nx._dispatchable(node_attrs="label_name")
def harmonic_function(G, max_iter=30, label_name="label"):
    """Node classification by Harmonic function

    Function for computing Harmonic function algorithm by Zhu et al.

    Parameters
    ----------
    G : NetworkX Graph
    max_iter : int
        maximum number of iterations allowed
    label_name : string
        name of target labels to predict

    Returns
    -------
    predicted : list
        List of length ``len(G)`` with the predicted labels for each node.

    Raises
    ------
    NetworkXError
        If no nodes in `G` have attribute `label_name`.

    Examples
    --------
    >>> from networkx.algorithms import node_classification
    >>> G = nx.path_graph(4)
    >>> G.nodes[0]["label"] = "A"
    >>> G.nodes[3]["label"] = "B"
    >>> G.nodes(data=True)
    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
    >>> G.edges()
    EdgeView([(0, 1), (1, 2), (2, 3)])
    >>> predicted = node_classification.harmonic_function(G)
    >>> predicted
    ['A', 'A', 'B', 'B']

    References
    ----------
    Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
    Semi-supervised learning using gaussian fields and harmonic functions.
    In ICML (Vol. 3, pp. 912-919).
    """
    import numpy as np
    import scipy as sp

    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
    labels, label_dict = _get_label_info(G, label_name)

    if labels.shape[0] == 0:
        raise nx.NetworkXError(
            f"No node on the input graph is labeled by '{label_name}'."
        )

    n_samples = X.shape[0]
    n_classes = label_dict.shape[0]
    F = np.zeros((n_samples, n_classes))

    # Build propagation matrix
    degrees = X.sum(axis=0)
    degrees[degrees == 0] = 1  # Avoid division by 0
    # TODO: csr_array
    D = sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0))
    P = (D @ X).tolil()
    P[labels[:, 0]] = 0  # labels[:, 0] indicates IDs of labeled nodes
    # Build base matrix
    B = np.zeros((n_samples, n_classes))
    B[labels[:, 0], labels[:, 1]] = 1

    for _ in range(max_iter):
        F = (P @ F) + B

    return label_dict[np.argmax(F, axis=1)].tolist()
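The propagation loop above is compact, so here is a dense NumPy restatement of the same F <- P F + B iteration (a hedged sketch for the 4-node path used in the docstring, not an alternative implementation):

import numpy as np

# Path graph 0-1-2-3, node 0 labeled "A" (class 0), node 3 labeled "B" (class 1).
A = np.array([[0, 1, 0, 0],
              [1, 0, 1, 0],
              [0, 1, 0, 1],
              [0, 0, 1, 0]], dtype=float)
P = A / A.sum(axis=1, keepdims=True)  # row-normalized adjacency, D^-1 @ A
P[[0, 3]] = 0  # clamp the labeled rows, as P[labels[:, 0]] = 0 does above
B = np.zeros((4, 2))
B[0, 0] = 1
B[3, 1] = 1

F = np.zeros((4, 2))
for _ in range(30):
    F = P @ F + B
print(np.argmax(F, axis=1))  # expected: [0 0 1 1], i.e. ['A', 'A', 'B', 'B']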
105 |
+
|
106 |
+
|
107 |
+
@nx.utils.not_implemented_for("directed")
|
108 |
+
@nx._dispatchable(node_attrs="label_name")
|
109 |
+
def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"):
|
110 |
+
"""Node classification by Local and Global Consistency
|
111 |
+
|
112 |
+
Function for computing Local and global consistency algorithm by Zhou et al.
|
113 |
+
|
114 |
+
Parameters
|
115 |
+
----------
|
116 |
+
G : NetworkX Graph
|
117 |
+
alpha : float
|
118 |
+
Clamping factor
|
119 |
+
max_iter : int
|
120 |
+
Maximum number of iterations allowed
|
121 |
+
label_name : string
|
122 |
+
Name of target labels to predict
|
123 |
+
|
124 |
+
Returns
|
125 |
+
-------
|
126 |
+
predicted : list
|
127 |
+
List of length ``len(G)`` with the predicted labels for each node.
|
128 |
+
|
129 |
+
Raises
|
130 |
+
------
|
131 |
+
NetworkXError
|
132 |
+
If no nodes in `G` have attribute `label_name`.
|
133 |
+
|
134 |
+
Examples
|
135 |
+
--------
|
136 |
+
>>> from networkx.algorithms import node_classification
|
137 |
+
>>> G = nx.path_graph(4)
|
138 |
+
>>> G.nodes[0]["label"] = "A"
|
139 |
+
>>> G.nodes[3]["label"] = "B"
|
140 |
+
>>> G.nodes(data=True)
|
141 |
+
NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
|
142 |
+
>>> G.edges()
|
143 |
+
EdgeView([(0, 1), (1, 2), (2, 3)])
|
144 |
+
>>> predicted = node_classification.local_and_global_consistency(G)
|
145 |
+
>>> predicted
|
146 |
+
['A', 'A', 'B', 'B']
|
147 |
+
|
148 |
+
References
|
149 |
+
----------
|
150 |
+
Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004).
|
151 |
+
Learning with local and global consistency.
|
152 |
+
Advances in neural information processing systems, 16(16), 321-328.
|
153 |
+
"""
|
154 |
+
import numpy as np
|
155 |
+
import scipy as sp
|
156 |
+
|
157 |
+
X = nx.to_scipy_sparse_array(G) # adjacency matrix
|
158 |
+
labels, label_dict = _get_label_info(G, label_name)
|
159 |
+
|
160 |
+
if labels.shape[0] == 0:
|
161 |
+
raise nx.NetworkXError(
|
162 |
+
f"No node on the input graph is labeled by '{label_name}'."
|
163 |
+
)
|
164 |
+
|
165 |
+
n_samples = X.shape[0]
|
166 |
+
n_classes = label_dict.shape[0]
|
167 |
+
F = np.zeros((n_samples, n_classes))
|
168 |
+
|
169 |
+
# Build propagation matrix
|
170 |
+
degrees = X.sum(axis=0)
|
171 |
+
degrees[degrees == 0] = 1 # Avoid division by 0
|
172 |
+
# TODO: csr_array
|
173 |
+
D2 = np.sqrt(sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)))
|
174 |
+
P = alpha * ((D2 @ X) @ D2)
|
175 |
+
# Build base matrix
|
176 |
+
B = np.zeros((n_samples, n_classes))
|
177 |
+
B[labels[:, 0], labels[:, 1]] = 1 - alpha
|
178 |
+
|
179 |
+
for _ in range(max_iter):
|
180 |
+
F = (P @ F) + B
|
181 |
+
|
182 |
+
return label_dict[np.argmax(F, axis=1)].tolist()
|
183 |
+
|
184 |
+
|
185 |
+
def _get_label_info(G, label_name):
|
186 |
+
"""Get and return information of labels from the input graph
|
187 |
+
|
188 |
+
Parameters
|
189 |
+
----------
|
190 |
+
G : Network X graph
|
191 |
+
label_name : string
|
192 |
+
Name of the target label
|
193 |
+
|
194 |
+
Returns
|
195 |
+
-------
|
196 |
+
labels : numpy array, shape = [n_labeled_samples, 2]
|
197 |
+
Array of pairs of labeled node ID and label ID
|
198 |
+
label_dict : numpy array, shape = [n_classes]
|
199 |
+
Array of labels
|
200 |
+
i-th element contains the label corresponding label ID `i`
|
201 |
+
"""
|
202 |
+
import numpy as np
|
203 |
+
|
204 |
+
labels = []
|
205 |
+
label_to_id = {}
|
206 |
+
lid = 0
|
207 |
+
for i, n in enumerate(G.nodes(data=True)):
|
208 |
+
if label_name in n[1]:
|
209 |
+
label = n[1][label_name]
|
210 |
+
if label not in label_to_id:
|
211 |
+
label_to_id[label] = lid
|
212 |
+
lid += 1
|
213 |
+
labels.append([i, label_to_id[label]])
|
214 |
+
labels = np.array(labels)
|
215 |
+
label_dict = np.array(
|
216 |
+
[label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])]
|
217 |
+
)
|
218 |
+
return (labels, label_dict)
|
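For reference: the update loop in local_and_global_consistency above implements the Zhou et al. iteration F <- alpha*S*F + (1-alpha)*Y with S = D^(-1/2) A D^(-1/2), which for alpha < 1 converges to the closed-form solution (I - alpha*S)^(-1) (1-alpha) Y. A minimal dense-NumPy sketch of that equivalence (illustrative only, not part of the uploaded file; the toy path graph, the label matrix Y, and the iteration count are assumptions):

import networkx as nx
import numpy as np

G = nx.path_graph(4)
A = nx.to_numpy_array(G)
d = A.sum(axis=0)
S = A / np.sqrt(np.outer(d, d))  # normalized adjacency D^-1/2 A D^-1/2
alpha = 0.99
Y = np.zeros((4, 2))
Y[0, 0] = 1  # node 0 labeled with class 0
Y[3, 1] = 1  # node 3 labeled with class 1

F = np.zeros_like(Y)
for _ in range(10_000):  # same update as the library loop, run to convergence
    F = alpha * S @ F + (1 - alpha) * Y

closed_form = np.linalg.solve(np.eye(4) - alpha * S, (1 - alpha) * Y)
assert np.allclose(F, closed_form, atol=1e-8)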
venv/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py
ADDED
@@ -0,0 +1,96 @@
r""" Computation of graph non-randomness
"""

import math

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ["non_randomness"]


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def non_randomness(G, k=None, weight="weight"):
    """Compute the non-randomness of graph G.

    The first returned value nr is the sum of non-randomness values of all
    edges within the graph (where the non-randomness of an edge tends to be
    small when the two nodes linked by that edge are from two different
    communities).

    The second computed value nr_rd is a relative measure that indicates
    to what extent graph G is different from random graphs in terms
    of probability. When it is close to 0, the graph tends to be more
    likely generated by an Erdos Renyi model.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be symmetric, connected, and without self-loops.

    k : int
        The number of communities in G.
        If k is not set, the function will use a default community
        detection algorithm to set it.

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1, i.e., the graph is
        binary.

    Returns
    -------
    non-randomness : (float, float) tuple
        Non-randomness, Relative non-randomness w.r.t.
        Erdos Renyi random graphs.

    Raises
    ------
    NetworkXException
        if the input graph is not connected.
    NetworkXError
        if the input graph contains self-loops.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> nr, nr_rd = nx.non_randomness(G, 2)
    >>> nr, nr_rd = nx.non_randomness(G, 2, "weight")

    Notes
    -----
    This computes Eq. (4.4) and (4.5) in Ref. [1]_.

    If a weight field is passed, this algorithm will use the eigenvalues
    of the weighted adjacency matrix to compute Eq. (4.4) and (4.5).

    References
    ----------
    .. [1] Xiaowei Ying and Xintao Wu,
          On Randomness Measures for Social Networks,
          SIAM International Conference on Data Mining. 2009
    """
    import numpy as np

    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if len(list(nx.selfloop_edges(G))) > 0:
        raise nx.NetworkXError("Graph must not contain self-loops")

    if k is None:
        k = len(tuple(nx.community.label_propagation_communities(G)))

    # eq. 4.4
    eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight))
    nr = float(np.real(np.sum(eigenvalues[:k])))

    n = G.number_of_nodes()
    m = G.number_of_edges()
    p = (2 * k * m) / (n * (n - k))

    # eq. 4.5
    nr_rd = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p))

    return nr, nr_rd
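To make eq. (4.5) in the docstring above concrete: the second return value can be recomputed from the first by plugging n, m, and k into the same formula. A small consistency check (illustrative only, not part of the uploaded file; the karate club graph and k = 2 are arbitrary choices):

import math
import networkx as nx

G = nx.karate_club_graph()
k = 2
nr, nr_rd = nx.non_randomness(G, k)

n, m = G.number_of_nodes(), G.number_of_edges()
p = (2 * k * m) / (n * (n - k))  # edge probability used in eq. (4.5)
expected = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p))
assert math.isclose(nr_rd, expected)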
venv/lib/python3.10/site-packages/networkx/algorithms/planarity.py
ADDED
@@ -0,0 +1,1402 @@
from collections import defaultdict

import networkx as nx

__all__ = ["check_planarity", "is_planar", "PlanarEmbedding"]


@nx._dispatchable
def is_planar(G):
    """Returns True if and only if `G` is planar.

    A graph is *planar* iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the graph is planar.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> nx.is_planar(G)
    True
    >>> nx.is_planar(nx.complete_graph(5))
    False

    See Also
    --------
    check_planarity :
        Check if graph is planar *and* return a `PlanarEmbedding` instance if True.
    """

    return check_planarity(G, counterexample=False)[0]


@nx._dispatchable(returns_graph=True)
def check_planarity(G, counterexample=False):
    """Check if a graph is planar and return a counterexample or an embedding.

    A graph is planar iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph
    counterexample : bool
        A Kuratowski subgraph (to proof non planarity) is only returned if set
        to true.

    Returns
    -------
    (is_planar, certificate) : (bool, NetworkX graph) tuple
        is_planar is true if the graph is planar.
        If the graph is planar `certificate` is a PlanarEmbedding
        otherwise it is a Kuratowski subgraph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> is_planar, P = nx.check_planarity(G)
    >>> print(is_planar)
    True

    When `G` is planar, a `PlanarEmbedding` instance is returned:

    >>> P.get_data()
    {0: [1, 2], 1: [0], 2: [0]}

    Notes
    -----
    A (combinatorial) embedding consists of cyclic orderings of the incident
    edges at each vertex. Given such an embedding there are multiple approaches
    discussed in literature to drawing the graph (subject to various
    constraints, e.g. integer coordinates), see e.g. [2].

    The planarity check algorithm and extraction of the combinatorial embedding
    is based on the Left-Right Planarity Test [1].

    A counterexample is only generated if the corresponding parameter is set,
    because the complexity of the counterexample generation is higher.

    See also
    --------
    is_planar :
        Check for planarity without creating a `PlanarEmbedding` or counterexample.

    References
    ----------
    .. [1] Ulrik Brandes:
        The Left-Right Planarity Test
        2009
        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208
    .. [2] Takao Nishizeki, Md Saidur Rahman:
        Planar graph drawing
        Lecture Notes Series on Computing: Volume 12
        2004
    """

    planarity_state = LRPlanarity(G)
    embedding = planarity_state.lr_planarity()
    if embedding is None:
        # graph is not planar
        if counterexample:
            return False, get_counterexample(G)
        else:
            return False, None
    else:
        # graph is planar
        return True, embedding


@nx._dispatchable(returns_graph=True)
def check_planarity_recursive(G, counterexample=False):
    """Recursive version of :meth:`check_planarity`."""
    planarity_state = LRPlanarity(G)
    embedding = planarity_state.lr_planarity_recursive()
    if embedding is None:
        # graph is not planar
        if counterexample:
            return False, get_counterexample_recursive(G)
        else:
            return False, None
    else:
        # graph is planar
        return True, embedding


@nx._dispatchable(returns_graph=True)
def get_counterexample(G):
    """Obtains a Kuratowski subgraph.

    Raises nx.NetworkXException if G is planar.

    The function removes edges such that the graph is still not planar.
    At some point the removal of any edge would make the graph planar.
    This subgraph must be a Kuratowski subgraph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    subgraph : NetworkX graph
        A Kuratowski subgraph that proves that G is not planar.

    """
    # copy graph
    G = nx.Graph(G)

    if check_planarity(G)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # find Kuratowski subgraph
    subgraph = nx.Graph()
    for u in G:
        nbrs = list(G[u])
        for v in nbrs:
            G.remove_edge(u, v)
            if check_planarity(G)[0]:
                G.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph


@nx._dispatchable(returns_graph=True)
def get_counterexample_recursive(G):
    """Recursive version of :meth:`get_counterexample`."""

    # copy graph
    G = nx.Graph(G)

    if check_planarity_recursive(G)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # find Kuratowski subgraph
    subgraph = nx.Graph()
    for u in G:
        nbrs = list(G[u])
        for v in nbrs:
            G.remove_edge(u, v)
            if check_planarity_recursive(G)[0]:
                G.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph
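As a concrete illustration of the edge-stripping loop in get_counterexample above: K5 is edge-minimal non-planar, so every edge survives the loop and the extracted Kuratowski subgraph is K5 itself (illustrative check, not part of the uploaded file):

import networkx as nx
from networkx.algorithms.planarity import get_counterexample

K5 = nx.complete_graph(5)
assert not nx.is_planar(K5)

sub = get_counterexample(K5)
# removing any single edge of K5 makes it planar, so all 10 edges are kept
assert sub.number_of_edges() == 10
assert not nx.is_planar(sub)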
195 |
+
class Interval:
|
196 |
+
"""Represents a set of return edges.
|
197 |
+
|
198 |
+
All return edges in an interval induce a same constraint on the contained
|
199 |
+
edges, which means that all edges must either have a left orientation or
|
200 |
+
all edges must have a right orientation.
|
201 |
+
"""
|
202 |
+
|
203 |
+
def __init__(self, low=None, high=None):
|
204 |
+
self.low = low
|
205 |
+
self.high = high
|
206 |
+
|
207 |
+
def empty(self):
|
208 |
+
"""Check if the interval is empty"""
|
209 |
+
return self.low is None and self.high is None
|
210 |
+
|
211 |
+
def copy(self):
|
212 |
+
"""Returns a copy of this interval"""
|
213 |
+
return Interval(self.low, self.high)
|
214 |
+
|
215 |
+
def conflicting(self, b, planarity_state):
|
216 |
+
"""Returns True if interval I conflicts with edge b"""
|
217 |
+
return (
|
218 |
+
not self.empty()
|
219 |
+
and planarity_state.lowpt[self.high] > planarity_state.lowpt[b]
|
220 |
+
)
|
221 |
+
|
222 |
+
|
223 |
+
class ConflictPair:
|
224 |
+
"""Represents a different constraint between two intervals.
|
225 |
+
|
226 |
+
The edges in the left interval must have a different orientation than
|
227 |
+
the one in the right interval.
|
228 |
+
"""
|
229 |
+
|
230 |
+
def __init__(self, left=Interval(), right=Interval()):
|
231 |
+
self.left = left
|
232 |
+
self.right = right
|
233 |
+
|
234 |
+
def swap(self):
|
235 |
+
"""Swap left and right intervals"""
|
236 |
+
temp = self.left
|
237 |
+
self.left = self.right
|
238 |
+
self.right = temp
|
239 |
+
|
240 |
+
def lowest(self, planarity_state):
|
241 |
+
"""Returns the lowest lowpoint of a conflict pair"""
|
242 |
+
if self.left.empty():
|
243 |
+
return planarity_state.lowpt[self.right.low]
|
244 |
+
if self.right.empty():
|
245 |
+
return planarity_state.lowpt[self.left.low]
|
246 |
+
return min(
|
247 |
+
planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low]
|
248 |
+
)
|
249 |
+
|
250 |
+
|
251 |
+
def top_of_stack(l):
|
252 |
+
"""Returns the element on top of the stack."""
|
253 |
+
if not l:
|
254 |
+
return None
|
255 |
+
return l[-1]
|
256 |
+
|
257 |
+
|
258 |
+
class LRPlanarity:
|
259 |
+
"""A class to maintain the state during planarity check."""
|
260 |
+
|
261 |
+
__slots__ = [
|
262 |
+
"G",
|
263 |
+
"roots",
|
264 |
+
"height",
|
265 |
+
"lowpt",
|
266 |
+
"lowpt2",
|
267 |
+
"nesting_depth",
|
268 |
+
"parent_edge",
|
269 |
+
"DG",
|
270 |
+
"adjs",
|
271 |
+
"ordered_adjs",
|
272 |
+
"ref",
|
273 |
+
"side",
|
274 |
+
"S",
|
275 |
+
"stack_bottom",
|
276 |
+
"lowpt_edge",
|
277 |
+
"left_ref",
|
278 |
+
"right_ref",
|
279 |
+
"embedding",
|
280 |
+
]
|
281 |
+
|
282 |
+
def __init__(self, G):
|
283 |
+
# copy G without adding self-loops
|
284 |
+
self.G = nx.Graph()
|
285 |
+
self.G.add_nodes_from(G.nodes)
|
286 |
+
for e in G.edges:
|
287 |
+
if e[0] != e[1]:
|
288 |
+
self.G.add_edge(e[0], e[1])
|
289 |
+
|
290 |
+
self.roots = []
|
291 |
+
|
292 |
+
# distance from tree root
|
293 |
+
self.height = defaultdict(lambda: None)
|
294 |
+
|
295 |
+
self.lowpt = {} # height of lowest return point of an edge
|
296 |
+
self.lowpt2 = {} # height of second lowest return point
|
297 |
+
self.nesting_depth = {} # for nesting order
|
298 |
+
|
299 |
+
# None -> missing edge
|
300 |
+
self.parent_edge = defaultdict(lambda: None)
|
301 |
+
|
302 |
+
# oriented DFS graph
|
303 |
+
self.DG = nx.DiGraph()
|
304 |
+
self.DG.add_nodes_from(G.nodes)
|
305 |
+
|
306 |
+
self.adjs = {}
|
307 |
+
self.ordered_adjs = {}
|
308 |
+
|
309 |
+
self.ref = defaultdict(lambda: None)
|
310 |
+
self.side = defaultdict(lambda: 1)
|
311 |
+
|
312 |
+
# stack of conflict pairs
|
313 |
+
self.S = []
|
314 |
+
self.stack_bottom = {}
|
315 |
+
self.lowpt_edge = {}
|
316 |
+
|
317 |
+
self.left_ref = {}
|
318 |
+
self.right_ref = {}
|
319 |
+
|
320 |
+
self.embedding = PlanarEmbedding()
|
321 |
+
|
322 |
+
def lr_planarity(self):
|
323 |
+
"""Execute the LR planarity test.
|
324 |
+
|
325 |
+
Returns
|
326 |
+
-------
|
327 |
+
embedding : dict
|
328 |
+
If the graph is planar an embedding is returned. Otherwise None.
|
329 |
+
"""
|
330 |
+
if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
|
331 |
+
# graph is not planar
|
332 |
+
return None
|
333 |
+
|
334 |
+
# make adjacency lists for dfs
|
335 |
+
for v in self.G:
|
336 |
+
self.adjs[v] = list(self.G[v])
|
337 |
+
|
338 |
+
# orientation of the graph by depth first search traversal
|
339 |
+
for v in self.G:
|
340 |
+
if self.height[v] is None:
|
341 |
+
self.height[v] = 0
|
342 |
+
self.roots.append(v)
|
343 |
+
self.dfs_orientation(v)
|
344 |
+
|
345 |
+
# Free no longer used variables
|
346 |
+
self.G = None
|
347 |
+
self.lowpt2 = None
|
348 |
+
self.adjs = None
|
349 |
+
|
350 |
+
# testing
|
351 |
+
for v in self.DG: # sort the adjacency lists by nesting depth
|
352 |
+
# note: this sorting leads to non linear time
|
353 |
+
self.ordered_adjs[v] = sorted(
|
354 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
355 |
+
)
|
356 |
+
for v in self.roots:
|
357 |
+
if not self.dfs_testing(v):
|
358 |
+
return None
|
359 |
+
|
360 |
+
# Free no longer used variables
|
361 |
+
self.height = None
|
362 |
+
self.lowpt = None
|
363 |
+
self.S = None
|
364 |
+
self.stack_bottom = None
|
365 |
+
self.lowpt_edge = None
|
366 |
+
|
367 |
+
for e in self.DG.edges:
|
368 |
+
self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e]
|
369 |
+
|
370 |
+
self.embedding.add_nodes_from(self.DG.nodes)
|
371 |
+
for v in self.DG:
|
372 |
+
# sort the adjacency lists again
|
373 |
+
self.ordered_adjs[v] = sorted(
|
374 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
375 |
+
)
|
376 |
+
# initialize the embedding
|
377 |
+
previous_node = None
|
378 |
+
for w in self.ordered_adjs[v]:
|
379 |
+
self.embedding.add_half_edge(v, w, ccw=previous_node)
|
380 |
+
previous_node = w
|
381 |
+
|
382 |
+
# Free no longer used variables
|
383 |
+
self.DG = None
|
384 |
+
self.nesting_depth = None
|
385 |
+
self.ref = None
|
386 |
+
|
387 |
+
# compute the complete embedding
|
388 |
+
for v in self.roots:
|
389 |
+
self.dfs_embedding(v)
|
390 |
+
|
391 |
+
# Free no longer used variables
|
392 |
+
self.roots = None
|
393 |
+
self.parent_edge = None
|
394 |
+
self.ordered_adjs = None
|
395 |
+
self.left_ref = None
|
396 |
+
self.right_ref = None
|
397 |
+
self.side = None
|
398 |
+
|
399 |
+
return self.embedding
|
400 |
+
|
401 |
+
def lr_planarity_recursive(self):
|
402 |
+
"""Recursive version of :meth:`lr_planarity`."""
|
403 |
+
if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
|
404 |
+
# graph is not planar
|
405 |
+
return None
|
406 |
+
|
407 |
+
# orientation of the graph by depth first search traversal
|
408 |
+
for v in self.G:
|
409 |
+
if self.height[v] is None:
|
410 |
+
self.height[v] = 0
|
411 |
+
self.roots.append(v)
|
412 |
+
self.dfs_orientation_recursive(v)
|
413 |
+
|
414 |
+
# Free no longer used variable
|
415 |
+
self.G = None
|
416 |
+
|
417 |
+
# testing
|
418 |
+
for v in self.DG: # sort the adjacency lists by nesting depth
|
419 |
+
# note: this sorting leads to non linear time
|
420 |
+
self.ordered_adjs[v] = sorted(
|
421 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
422 |
+
)
|
423 |
+
for v in self.roots:
|
424 |
+
if not self.dfs_testing_recursive(v):
|
425 |
+
return None
|
426 |
+
|
427 |
+
for e in self.DG.edges:
|
428 |
+
self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e]
|
429 |
+
|
430 |
+
self.embedding.add_nodes_from(self.DG.nodes)
|
431 |
+
for v in self.DG:
|
432 |
+
# sort the adjacency lists again
|
433 |
+
self.ordered_adjs[v] = sorted(
|
434 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
435 |
+
)
|
436 |
+
# initialize the embedding
|
437 |
+
previous_node = None
|
438 |
+
for w in self.ordered_adjs[v]:
|
439 |
+
self.embedding.add_half_edge(v, w, ccw=previous_node)
|
440 |
+
previous_node = w
|
441 |
+
|
442 |
+
# compute the complete embedding
|
443 |
+
for v in self.roots:
|
444 |
+
self.dfs_embedding_recursive(v)
|
445 |
+
|
446 |
+
return self.embedding
|
447 |
+
|
448 |
+
def dfs_orientation(self, v):
|
449 |
+
"""Orient the graph by DFS, compute lowpoints and nesting order."""
|
450 |
+
# the recursion stack
|
451 |
+
dfs_stack = [v]
|
452 |
+
# index of next edge to handle in adjacency list of each node
|
453 |
+
ind = defaultdict(lambda: 0)
|
454 |
+
# boolean to indicate whether to skip the initial work for an edge
|
455 |
+
skip_init = defaultdict(lambda: False)
|
456 |
+
|
457 |
+
while dfs_stack:
|
458 |
+
v = dfs_stack.pop()
|
459 |
+
e = self.parent_edge[v]
|
460 |
+
|
461 |
+
for w in self.adjs[v][ind[v] :]:
|
462 |
+
vw = (v, w)
|
463 |
+
|
464 |
+
if not skip_init[vw]:
|
465 |
+
if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
|
466 |
+
ind[v] += 1
|
467 |
+
continue # the edge was already oriented
|
468 |
+
|
469 |
+
self.DG.add_edge(v, w) # orient the edge
|
470 |
+
|
471 |
+
self.lowpt[vw] = self.height[v]
|
472 |
+
self.lowpt2[vw] = self.height[v]
|
473 |
+
if self.height[w] is None: # (v, w) is a tree edge
|
474 |
+
self.parent_edge[w] = vw
|
475 |
+
self.height[w] = self.height[v] + 1
|
476 |
+
|
477 |
+
dfs_stack.append(v) # revisit v after finishing w
|
478 |
+
dfs_stack.append(w) # visit w next
|
479 |
+
skip_init[vw] = True # don't redo this block
|
480 |
+
break # handle next node in dfs_stack (i.e. w)
|
481 |
+
else: # (v, w) is a back edge
|
482 |
+
self.lowpt[vw] = self.height[w]
|
483 |
+
|
484 |
+
# determine nesting graph
|
485 |
+
self.nesting_depth[vw] = 2 * self.lowpt[vw]
|
486 |
+
if self.lowpt2[vw] < self.height[v]: # chordal
|
487 |
+
self.nesting_depth[vw] += 1
|
488 |
+
|
489 |
+
# update lowpoints of parent edge e
|
490 |
+
if e is not None:
|
491 |
+
if self.lowpt[vw] < self.lowpt[e]:
|
492 |
+
self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
|
493 |
+
self.lowpt[e] = self.lowpt[vw]
|
494 |
+
elif self.lowpt[vw] > self.lowpt[e]:
|
495 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
|
496 |
+
else:
|
497 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])
|
498 |
+
|
499 |
+
ind[v] += 1
|
500 |
+
|
501 |
+
def dfs_orientation_recursive(self, v):
|
502 |
+
"""Recursive version of :meth:`dfs_orientation`."""
|
503 |
+
e = self.parent_edge[v]
|
504 |
+
for w in self.G[v]:
|
505 |
+
if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
|
506 |
+
continue # the edge was already oriented
|
507 |
+
vw = (v, w)
|
508 |
+
self.DG.add_edge(v, w) # orient the edge
|
509 |
+
|
510 |
+
self.lowpt[vw] = self.height[v]
|
511 |
+
self.lowpt2[vw] = self.height[v]
|
512 |
+
if self.height[w] is None: # (v, w) is a tree edge
|
513 |
+
self.parent_edge[w] = vw
|
514 |
+
self.height[w] = self.height[v] + 1
|
515 |
+
self.dfs_orientation_recursive(w)
|
516 |
+
else: # (v, w) is a back edge
|
517 |
+
self.lowpt[vw] = self.height[w]
|
518 |
+
|
519 |
+
# determine nesting graph
|
520 |
+
self.nesting_depth[vw] = 2 * self.lowpt[vw]
|
521 |
+
if self.lowpt2[vw] < self.height[v]: # chordal
|
522 |
+
self.nesting_depth[vw] += 1
|
523 |
+
|
524 |
+
# update lowpoints of parent edge e
|
525 |
+
if e is not None:
|
526 |
+
if self.lowpt[vw] < self.lowpt[e]:
|
527 |
+
self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
|
528 |
+
self.lowpt[e] = self.lowpt[vw]
|
529 |
+
elif self.lowpt[vw] > self.lowpt[e]:
|
530 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
|
531 |
+
else:
|
532 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])
|
533 |
+
|
534 |
+
def dfs_testing(self, v):
|
535 |
+
"""Test for LR partition."""
|
536 |
+
# the recursion stack
|
537 |
+
dfs_stack = [v]
|
538 |
+
# index of next edge to handle in adjacency list of each node
|
539 |
+
ind = defaultdict(lambda: 0)
|
540 |
+
# boolean to indicate whether to skip the initial work for an edge
|
541 |
+
skip_init = defaultdict(lambda: False)
|
542 |
+
|
543 |
+
while dfs_stack:
|
544 |
+
v = dfs_stack.pop()
|
545 |
+
e = self.parent_edge[v]
|
546 |
+
# to indicate whether to skip the final block after the for loop
|
547 |
+
skip_final = False
|
548 |
+
|
549 |
+
for w in self.ordered_adjs[v][ind[v] :]:
|
550 |
+
ei = (v, w)
|
551 |
+
|
552 |
+
if not skip_init[ei]:
|
553 |
+
self.stack_bottom[ei] = top_of_stack(self.S)
|
554 |
+
|
555 |
+
if ei == self.parent_edge[w]: # tree edge
|
556 |
+
dfs_stack.append(v) # revisit v after finishing w
|
557 |
+
dfs_stack.append(w) # visit w next
|
558 |
+
skip_init[ei] = True # don't redo this block
|
559 |
+
skip_final = True # skip final work after breaking
|
560 |
+
break # handle next node in dfs_stack (i.e. w)
|
561 |
+
else: # back edge
|
562 |
+
self.lowpt_edge[ei] = ei
|
563 |
+
self.S.append(ConflictPair(right=Interval(ei, ei)))
|
564 |
+
|
565 |
+
# integrate new return edges
|
566 |
+
if self.lowpt[ei] < self.height[v]:
|
567 |
+
if w == self.ordered_adjs[v][0]: # e_i has return edge
|
568 |
+
self.lowpt_edge[e] = self.lowpt_edge[ei]
|
569 |
+
else: # add constraints of e_i
|
570 |
+
if not self.add_constraints(ei, e):
|
571 |
+
# graph is not planar
|
572 |
+
return False
|
573 |
+
|
574 |
+
ind[v] += 1
|
575 |
+
|
576 |
+
if not skip_final:
|
577 |
+
# remove back edges returning to parent
|
578 |
+
if e is not None: # v isn't root
|
579 |
+
self.remove_back_edges(e)
|
580 |
+
|
581 |
+
return True
|
582 |
+
|
583 |
+
def dfs_testing_recursive(self, v):
|
584 |
+
"""Recursive version of :meth:`dfs_testing`."""
|
585 |
+
e = self.parent_edge[v]
|
586 |
+
for w in self.ordered_adjs[v]:
|
587 |
+
ei = (v, w)
|
588 |
+
self.stack_bottom[ei] = top_of_stack(self.S)
|
589 |
+
if ei == self.parent_edge[w]: # tree edge
|
590 |
+
if not self.dfs_testing_recursive(w):
|
591 |
+
return False
|
592 |
+
else: # back edge
|
593 |
+
self.lowpt_edge[ei] = ei
|
594 |
+
self.S.append(ConflictPair(right=Interval(ei, ei)))
|
595 |
+
|
596 |
+
# integrate new return edges
|
597 |
+
if self.lowpt[ei] < self.height[v]:
|
598 |
+
if w == self.ordered_adjs[v][0]: # e_i has return edge
|
599 |
+
self.lowpt_edge[e] = self.lowpt_edge[ei]
|
600 |
+
else: # add constraints of e_i
|
601 |
+
if not self.add_constraints(ei, e):
|
602 |
+
# graph is not planar
|
603 |
+
return False
|
604 |
+
|
605 |
+
# remove back edges returning to parent
|
606 |
+
if e is not None: # v isn't root
|
607 |
+
self.remove_back_edges(e)
|
608 |
+
return True
|
609 |
+
|
610 |
+
def add_constraints(self, ei, e):
|
611 |
+
P = ConflictPair()
|
612 |
+
# merge return edges of e_i into P.right
|
613 |
+
while True:
|
614 |
+
Q = self.S.pop()
|
615 |
+
if not Q.left.empty():
|
616 |
+
Q.swap()
|
617 |
+
if not Q.left.empty(): # not planar
|
618 |
+
return False
|
619 |
+
if self.lowpt[Q.right.low] > self.lowpt[e]:
|
620 |
+
# merge intervals
|
621 |
+
if P.right.empty(): # topmost interval
|
622 |
+
P.right = Q.right.copy()
|
623 |
+
else:
|
624 |
+
self.ref[P.right.low] = Q.right.high
|
625 |
+
P.right.low = Q.right.low
|
626 |
+
else: # align
|
627 |
+
self.ref[Q.right.low] = self.lowpt_edge[e]
|
628 |
+
if top_of_stack(self.S) == self.stack_bottom[ei]:
|
629 |
+
break
|
630 |
+
# merge conflicting return edges of e_1,...,e_i-1 into P.L
|
631 |
+
while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack(
|
632 |
+
self.S
|
633 |
+
).right.conflicting(ei, self):
|
634 |
+
Q = self.S.pop()
|
635 |
+
if Q.right.conflicting(ei, self):
|
636 |
+
Q.swap()
|
637 |
+
if Q.right.conflicting(ei, self): # not planar
|
638 |
+
return False
|
639 |
+
# merge interval below lowpt(e_i) into P.R
|
640 |
+
self.ref[P.right.low] = Q.right.high
|
641 |
+
if Q.right.low is not None:
|
642 |
+
P.right.low = Q.right.low
|
643 |
+
|
644 |
+
if P.left.empty(): # topmost interval
|
645 |
+
P.left = Q.left.copy()
|
646 |
+
else:
|
647 |
+
self.ref[P.left.low] = Q.left.high
|
648 |
+
P.left.low = Q.left.low
|
649 |
+
|
650 |
+
if not (P.left.empty() and P.right.empty()):
|
651 |
+
self.S.append(P)
|
652 |
+
return True
|
653 |
+
|
654 |
+
def remove_back_edges(self, e):
|
655 |
+
u = e[0]
|
656 |
+
# trim back edges ending at parent u
|
657 |
+
# drop entire conflict pairs
|
658 |
+
while self.S and top_of_stack(self.S).lowest(self) == self.height[u]:
|
659 |
+
P = self.S.pop()
|
660 |
+
if P.left.low is not None:
|
661 |
+
self.side[P.left.low] = -1
|
662 |
+
|
663 |
+
if self.S: # one more conflict pair to consider
|
664 |
+
P = self.S.pop()
|
665 |
+
# trim left interval
|
666 |
+
while P.left.high is not None and P.left.high[1] == u:
|
667 |
+
P.left.high = self.ref[P.left.high]
|
668 |
+
if P.left.high is None and P.left.low is not None:
|
669 |
+
# just emptied
|
670 |
+
self.ref[P.left.low] = P.right.low
|
671 |
+
self.side[P.left.low] = -1
|
672 |
+
P.left.low = None
|
673 |
+
# trim right interval
|
674 |
+
while P.right.high is not None and P.right.high[1] == u:
|
675 |
+
P.right.high = self.ref[P.right.high]
|
676 |
+
if P.right.high is None and P.right.low is not None:
|
677 |
+
# just emptied
|
678 |
+
self.ref[P.right.low] = P.left.low
|
679 |
+
self.side[P.right.low] = -1
|
680 |
+
P.right.low = None
|
681 |
+
self.S.append(P)
|
682 |
+
|
683 |
+
# side of e is side of a highest return edge
|
684 |
+
if self.lowpt[e] < self.height[u]: # e has return edge
|
685 |
+
hl = top_of_stack(self.S).left.high
|
686 |
+
hr = top_of_stack(self.S).right.high
|
687 |
+
|
688 |
+
if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]):
|
689 |
+
self.ref[e] = hl
|
690 |
+
else:
|
691 |
+
self.ref[e] = hr
|
692 |
+
|
693 |
+
def dfs_embedding(self, v):
|
694 |
+
"""Completes the embedding."""
|
695 |
+
# the recursion stack
|
696 |
+
dfs_stack = [v]
|
697 |
+
# index of next edge to handle in adjacency list of each node
|
698 |
+
ind = defaultdict(lambda: 0)
|
699 |
+
|
700 |
+
while dfs_stack:
|
701 |
+
v = dfs_stack.pop()
|
702 |
+
|
703 |
+
for w in self.ordered_adjs[v][ind[v] :]:
|
704 |
+
ind[v] += 1
|
705 |
+
ei = (v, w)
|
706 |
+
|
707 |
+
if ei == self.parent_edge[w]: # tree edge
|
708 |
+
self.embedding.add_half_edge_first(w, v)
|
709 |
+
self.left_ref[v] = w
|
710 |
+
self.right_ref[v] = w
|
711 |
+
|
712 |
+
dfs_stack.append(v) # revisit v after finishing w
|
713 |
+
dfs_stack.append(w) # visit w next
|
714 |
+
break # handle next node in dfs_stack (i.e. w)
|
715 |
+
else: # back edge
|
716 |
+
if self.side[ei] == 1:
|
717 |
+
self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
|
718 |
+
else:
|
719 |
+
self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
|
720 |
+
self.left_ref[w] = v
|
721 |
+
|
722 |
+
def dfs_embedding_recursive(self, v):
|
723 |
+
"""Recursive version of :meth:`dfs_embedding`."""
|
724 |
+
for w in self.ordered_adjs[v]:
|
725 |
+
ei = (v, w)
|
726 |
+
if ei == self.parent_edge[w]: # tree edge
|
727 |
+
self.embedding.add_half_edge_first(w, v)
|
728 |
+
self.left_ref[v] = w
|
729 |
+
self.right_ref[v] = w
|
730 |
+
self.dfs_embedding_recursive(w)
|
731 |
+
else: # back edge
|
732 |
+
if self.side[ei] == 1:
|
733 |
+
# place v directly after right_ref[w] in embed. list of w
|
734 |
+
self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
|
735 |
+
else:
|
736 |
+
# place v directly before left_ref[w] in embed. list of w
|
737 |
+
self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
|
738 |
+
self.left_ref[w] = v
|
739 |
+
|
740 |
+
def sign(self, e):
|
741 |
+
"""Resolve the relative side of an edge to the absolute side."""
|
742 |
+
# the recursion stack
|
743 |
+
dfs_stack = [e]
|
744 |
+
# dict to remember reference edges
|
745 |
+
old_ref = defaultdict(lambda: None)
|
746 |
+
|
747 |
+
while dfs_stack:
|
748 |
+
e = dfs_stack.pop()
|
749 |
+
|
750 |
+
if self.ref[e] is not None:
|
751 |
+
dfs_stack.append(e) # revisit e after finishing self.ref[e]
|
752 |
+
dfs_stack.append(self.ref[e]) # visit self.ref[e] next
|
753 |
+
old_ref[e] = self.ref[e] # remember value of self.ref[e]
|
754 |
+
self.ref[e] = None
|
755 |
+
else:
|
756 |
+
self.side[e] *= self.side[old_ref[e]]
|
757 |
+
|
758 |
+
return self.side[e]
|
759 |
+
|
760 |
+
def sign_recursive(self, e):
|
761 |
+
"""Recursive version of :meth:`sign`."""
|
762 |
+
if self.ref[e] is not None:
|
763 |
+
self.side[e] = self.side[e] * self.sign_recursive(self.ref[e])
|
764 |
+
self.ref[e] = None
|
765 |
+
return self.side[e]
|
766 |
+
|
767 |
+
|
768 |
+
class PlanarEmbedding(nx.DiGraph):
|
769 |
+
"""Represents a planar graph with its planar embedding.
|
770 |
+
|
771 |
+
The planar embedding is given by a `combinatorial embedding
|
772 |
+
<https://en.wikipedia.org/wiki/Graph_embedding#Combinatorial_embedding>`_.
|
773 |
+
|
774 |
+
.. note:: `check_planarity` is the preferred way to check if a graph is planar.
|
775 |
+
|
776 |
+
**Neighbor ordering:**
|
777 |
+
|
778 |
+
In comparison to a usual graph structure, the embedding also stores the
|
779 |
+
order of all neighbors for every vertex.
|
780 |
+
The order of the neighbors can be given in clockwise (cw) direction or
|
781 |
+
counterclockwise (ccw) direction. This order is stored as edge attributes
|
782 |
+
in the underlying directed graph. For the edge (u, v) the edge attribute
|
783 |
+
'cw' is set to the neighbor of u that follows immediately after v in
|
784 |
+
clockwise direction.
|
785 |
+
|
786 |
+
In order for a PlanarEmbedding to be valid it must fulfill multiple
|
787 |
+
conditions. It is possible to check if these conditions are fulfilled with
|
788 |
+
the method :meth:`check_structure`.
|
789 |
+
The conditions are:
|
790 |
+
|
791 |
+
* Edges must go in both directions (because the edge attributes differ)
|
792 |
+
* Every edge must have a 'cw' and 'ccw' attribute which corresponds to a
|
793 |
+
correct planar embedding.
|
794 |
+
|
795 |
+
As long as a PlanarEmbedding is invalid only the following methods should
|
796 |
+
be called:
|
797 |
+
|
798 |
+
* :meth:`add_half_edge`
|
799 |
+
* :meth:`connect_components`
|
800 |
+
|
801 |
+
Even though the graph is a subclass of nx.DiGraph, it can still be used
|
802 |
+
for algorithms that require undirected graphs, because the method
|
803 |
+
:meth:`is_directed` is overridden. This is possible, because a valid
|
804 |
+
PlanarGraph must have edges in both directions.
|
805 |
+
|
806 |
+
**Half edges:**
|
807 |
+
|
808 |
+
In methods like `add_half_edge` the term "half-edge" is used, which is
|
809 |
+
a term that is used in `doubly connected edge lists
|
810 |
+
<https://en.wikipedia.org/wiki/Doubly_connected_edge_list>`_. It is used
|
811 |
+
to emphasize that the edge is only in one direction and there exists
|
812 |
+
another half-edge in the opposite direction.
|
813 |
+
While conventional edges always have two faces (including outer face) next
|
814 |
+
to them, it is possible to assign each half-edge *exactly one* face.
|
815 |
+
For a half-edge (u, v) that is oriented such that u is below v then the
|
816 |
+
face that belongs to (u, v) is to the right of this half-edge.
|
817 |
+
|
818 |
+
See Also
|
819 |
+
--------
|
820 |
+
is_planar :
|
821 |
+
Preferred way to check if an existing graph is planar.
|
822 |
+
|
823 |
+
check_planarity :
|
824 |
+
A convenient way to create a `PlanarEmbedding`. If not planar,
|
825 |
+
it returns a subgraph that shows this.
|
826 |
+
|
827 |
+
Examples
|
828 |
+
--------
|
829 |
+
|
830 |
+
Create an embedding of a star graph (compare `nx.star_graph(3)`):
|
831 |
+
|
832 |
+
>>> G = nx.PlanarEmbedding()
|
833 |
+
>>> G.add_half_edge(0, 1)
|
834 |
+
>>> G.add_half_edge(0, 2, ccw=1)
|
835 |
+
>>> G.add_half_edge(0, 3, ccw=2)
|
836 |
+
>>> G.add_half_edge(1, 0)
|
837 |
+
>>> G.add_half_edge(2, 0)
|
838 |
+
>>> G.add_half_edge(3, 0)
|
839 |
+
|
840 |
+
Alternatively the same embedding can also be defined in counterclockwise
|
841 |
+
orientation. The following results in exactly the same PlanarEmbedding:
|
842 |
+
|
843 |
+
>>> G = nx.PlanarEmbedding()
|
844 |
+
>>> G.add_half_edge(0, 1)
|
845 |
+
>>> G.add_half_edge(0, 3, cw=1)
|
846 |
+
>>> G.add_half_edge(0, 2, cw=3)
|
847 |
+
>>> G.add_half_edge(1, 0)
|
848 |
+
>>> G.add_half_edge(2, 0)
|
849 |
+
>>> G.add_half_edge(3, 0)
|
850 |
+
|
851 |
+
After creating a graph, it is possible to validate that the PlanarEmbedding
|
852 |
+
object is correct:
|
853 |
+
|
854 |
+
>>> G.check_structure()
|
855 |
+
|
856 |
+
"""
|
857 |
+
|
858 |
+
def __init__(self, incoming_graph_data=None, **attr):
|
859 |
+
super().__init__(incoming_graph_data=incoming_graph_data, **attr)
|
860 |
+
self.add_edge = self.__forbidden
|
861 |
+
self.add_edges_from = self.__forbidden
|
862 |
+
self.add_weighted_edges_from = self.__forbidden
|
863 |
+
|
864 |
+
def __forbidden(self, *args, **kwargs):
|
865 |
+
"""Forbidden operation
|
866 |
+
|
867 |
+
Any edge additions to a PlanarEmbedding should be done using
|
868 |
+
method `add_half_edge`.
|
869 |
+
"""
|
870 |
+
raise NotImplementedError(
|
871 |
+
"Use `add_half_edge` method to add edges to a PlanarEmbedding."
|
872 |
+
)
|
873 |
+
|
874 |
+
def get_data(self):
|
875 |
+
"""Converts the adjacency structure into a better readable structure.
|
876 |
+
|
877 |
+
Returns
|
878 |
+
-------
|
879 |
+
embedding : dict
|
880 |
+
A dict mapping all nodes to a list of neighbors sorted in
|
881 |
+
clockwise order.
|
882 |
+
|
883 |
+
See Also
|
884 |
+
--------
|
885 |
+
set_data
|
886 |
+
|
887 |
+
"""
|
888 |
+
embedding = {}
|
889 |
+
for v in self:
|
890 |
+
embedding[v] = list(self.neighbors_cw_order(v))
|
891 |
+
return embedding
|
892 |
+
|
893 |
+
def set_data(self, data):
|
894 |
+
"""Inserts edges according to given sorted neighbor list.
|
895 |
+
|
896 |
+
The input format is the same as the output format of get_data().
|
897 |
+
|
898 |
+
Parameters
|
899 |
+
----------
|
900 |
+
data : dict
|
901 |
+
A dict mapping all nodes to a list of neighbors sorted in
|
902 |
+
clockwise order.
|
903 |
+
|
904 |
+
See Also
|
905 |
+
--------
|
906 |
+
get_data
|
907 |
+
|
908 |
+
"""
|
909 |
+
for v in data:
|
910 |
+
ref = None
|
911 |
+
for w in reversed(data[v]):
|
912 |
+
self.add_half_edge(v, w, cw=ref)
|
913 |
+
ref = w
|
914 |
+
|
915 |
+
def remove_node(self, n):
|
916 |
+
"""Remove node n.
|
917 |
+
|
918 |
+
Removes the node n and all adjacent edges, updating the
|
919 |
+
PlanarEmbedding to account for any resulting edge removal.
|
920 |
+
Attempting to remove a non-existent node will raise an exception.
|
921 |
+
|
922 |
+
Parameters
|
923 |
+
----------
|
924 |
+
n : node
|
925 |
+
A node in the graph
|
926 |
+
|
927 |
+
Raises
|
928 |
+
------
|
929 |
+
NetworkXError
|
930 |
+
If n is not in the graph.
|
931 |
+
|
932 |
+
See Also
|
933 |
+
--------
|
934 |
+
remove_nodes_from
|
935 |
+
|
936 |
+
"""
|
937 |
+
try:
|
938 |
+
for u in self._pred[n]:
|
939 |
+
succs_u = self._succ[u]
|
940 |
+
un_cw = succs_u[n]["cw"]
|
941 |
+
un_ccw = succs_u[n]["ccw"]
|
942 |
+
del succs_u[n]
|
943 |
+
del self._pred[u][n]
|
944 |
+
if n != un_cw:
|
945 |
+
succs_u[un_cw]["ccw"] = un_ccw
|
946 |
+
succs_u[un_ccw]["cw"] = un_cw
|
947 |
+
del self._node[n]
|
948 |
+
del self._succ[n]
|
949 |
+
del self._pred[n]
|
950 |
+
except KeyError as err: # NetworkXError if n not in self
|
951 |
+
raise nx.NetworkXError(
|
952 |
+
f"The node {n} is not in the planar embedding."
|
953 |
+
) from err
|
954 |
+
nx._clear_cache(self)
|
955 |
+
|
956 |
+
def remove_nodes_from(self, nodes):
|
957 |
+
"""Remove multiple nodes.
|
958 |
+
|
959 |
+
Parameters
|
960 |
+
----------
|
961 |
+
nodes : iterable container
|
962 |
+
A container of nodes (list, dict, set, etc.). If a node
|
963 |
+
in the container is not in the graph it is silently ignored.
|
964 |
+
|
965 |
+
See Also
|
966 |
+
--------
|
967 |
+
remove_node
|
968 |
+
|
969 |
+
Notes
|
970 |
+
-----
|
971 |
+
When removing nodes from an iterator over the graph you are changing,
|
972 |
+
a `RuntimeError` will be raised with message:
|
973 |
+
`RuntimeError: dictionary changed size during iteration`. This
|
974 |
+
happens when the graph's underlying dictionary is modified during
|
975 |
+
iteration. To avoid this error, evaluate the iterator into a separate
|
976 |
+
object, e.g. by using `list(iterator_of_nodes)`, and pass this
|
977 |
+
object to `G.remove_nodes_from`.
|
978 |
+
|
979 |
+
"""
|
980 |
+
for n in nodes:
|
981 |
+
if n in self._node:
|
982 |
+
self.remove_node(n)
|
983 |
+
# silently skip non-existing nodes
|
984 |
+
|
985 |
+
def neighbors_cw_order(self, v):
|
986 |
+
"""Generator for the neighbors of v in clockwise order.
|
987 |
+
|
988 |
+
Parameters
|
989 |
+
----------
|
990 |
+
v : node
|
991 |
+
|
992 |
+
Yields
|
993 |
+
------
|
994 |
+
node
|
995 |
+
|
996 |
+
"""
|
997 |
+
succs = self._succ[v]
|
998 |
+
if not succs:
|
999 |
+
# v has no neighbors
|
1000 |
+
return
|
1001 |
+
start_node = next(reversed(succs))
|
1002 |
+
yield start_node
|
1003 |
+
current_node = succs[start_node]["cw"]
|
1004 |
+
while start_node != current_node:
|
1005 |
+
yield current_node
|
1006 |
+
current_node = succs[current_node]["cw"]
|
1007 |
+
|
1008 |
+
def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None):
|
1009 |
+
"""Adds a half-edge from `start_node` to `end_node`.
|
1010 |
+
|
1011 |
+
If the half-edge is not the first one out of `start_node`, a reference
|
1012 |
+
node must be provided either in the clockwise (parameter `cw`) or in
|
1013 |
+
the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw`
|
1014 |
+
can be specified (or neither in the case of the first edge).
|
1015 |
+
Note that specifying a reference in the clockwise (`cw`) direction means
|
1016 |
+
inserting the new edge in the first counterclockwise position with
|
1017 |
+
respect to the reference (and vice-versa).
|
1018 |
+
|
1019 |
+
Parameters
|
1020 |
+
----------
|
1021 |
+
start_node : node
|
1022 |
+
Start node of inserted edge.
|
1023 |
+
end_node : node
|
1024 |
+
End node of inserted edge.
|
1025 |
+
cw, ccw: node
|
1026 |
+
End node of reference edge.
|
1027 |
+
Omit or pass `None` if adding the first out-half-edge of `start_node`.
|
1028 |
+
|
1029 |
+
|
1030 |
+
Raises
|
1031 |
+
------
|
1032 |
+
NetworkXException
|
1033 |
+
If the `cw` or `ccw` node is not a successor of `start_node`.
|
1034 |
+
If `start_node` has successors, but neither `cw` or `ccw` is provided.
|
1035 |
+
If both `cw` and `ccw` are specified.
|
1036 |
+
|
1037 |
+
See Also
|
1038 |
+
--------
|
1039 |
+
connect_components
|
1040 |
+
"""
|
1041 |
+
|
1042 |
+
succs = self._succ.get(start_node)
|
1043 |
+
if succs:
|
1044 |
+
# there is already some edge out of start_node
|
1045 |
+
leftmost_nbr = next(reversed(self._succ[start_node]))
|
1046 |
+
if cw is not None:
|
1047 |
+
if cw not in succs:
|
1048 |
+
raise nx.NetworkXError("Invalid clockwise reference node.")
|
1049 |
+
if ccw is not None:
|
1050 |
+
raise nx.NetworkXError("Only one of cw/ccw can be specified.")
|
1051 |
+
ref_ccw = succs[cw]["ccw"]
|
1052 |
+
super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw)
|
1053 |
+
succs[ref_ccw]["cw"] = end_node
|
1054 |
+
succs[cw]["ccw"] = end_node
|
1055 |
+
# when (cw == leftmost_nbr), the newly added neighbor is
|
1056 |
+
# already at the end of dict self._succ[start_node] and
|
1057 |
+
# takes the place of the former leftmost_nbr
|
1058 |
+
move_leftmost_nbr_to_end = cw != leftmost_nbr
|
1059 |
+
elif ccw is not None:
|
1060 |
+
if ccw not in succs:
|
1061 |
+
raise nx.NetworkXError("Invalid counterclockwise reference node.")
|
1062 |
+
ref_cw = succs[ccw]["cw"]
|
1063 |
+
super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw)
|
1064 |
+
succs[ref_cw]["ccw"] = end_node
|
1065 |
+
succs[ccw]["cw"] = end_node
|
1066 |
+
move_leftmost_nbr_to_end = True
|
1067 |
+
else:
|
1068 |
+
raise nx.NetworkXError(
|
1069 |
+
"Node already has out-half-edge(s), either cw or ccw reference node required."
|
1070 |
+
)
|
1071 |
+
if move_leftmost_nbr_to_end:
|
1072 |
+
# LRPlanarity (via self.add_half_edge_first()) requires that
|
1073 |
+
# we keep track of the leftmost neighbor, which we accomplish
|
1074 |
+
# by keeping it as the last key in dict self._succ[start_node]
|
1075 |
+
succs[leftmost_nbr] = succs.pop(leftmost_nbr)
|
1076 |
+
|
1077 |
+
else:
|
1078 |
+
if cw is not None or ccw is not None:
|
1079 |
+
raise nx.NetworkXError("Invalid reference node.")
|
1080 |
+
# adding the first edge out of start_node
|
1081 |
+
super().add_edge(start_node, end_node, ccw=end_node, cw=end_node)
|
1082 |
+
|
1083 |
+
def check_structure(self):
|
1084 |
+
"""Runs without exceptions if this object is valid.
|
1085 |
+
|
1086 |
+
Checks that the following properties are fulfilled:
|
1087 |
+
|
1088 |
+
* Edges go in both directions (because the edge attributes differ).
|
1089 |
+
* Every edge has a 'cw' and 'ccw' attribute which corresponds to a
|
1090 |
+
correct planar embedding.
|
1091 |
+
|
1092 |
+
Running this method verifies that the underlying Graph must be planar.
|
1093 |
+
|
1094 |
+
Raises
|
1095 |
+
------
|
1096 |
+
NetworkXException
|
1097 |
+
This exception is raised with a short explanation if the
|
1098 |
+
PlanarEmbedding is invalid.
|
1099 |
+
"""
|
1100 |
+
# Check fundamental structure
|
1101 |
+
for v in self:
|
1102 |
+
try:
|
1103 |
+
sorted_nbrs = set(self.neighbors_cw_order(v))
|
1104 |
+
except KeyError as err:
|
1105 |
+
msg = f"Bad embedding. Missing orientation for a neighbor of {v}"
|
1106 |
+
raise nx.NetworkXException(msg) from err
|
1107 |
+
|
1108 |
+
unsorted_nbrs = set(self[v])
|
1109 |
+
if sorted_nbrs != unsorted_nbrs:
|
1110 |
+
msg = "Bad embedding. Edge orientations not set correctly."
|
1111 |
+
raise nx.NetworkXException(msg)
|
1112 |
+
for w in self[v]:
|
1113 |
+
# Check if opposite half-edge exists
|
1114 |
+
if not self.has_edge(w, v):
|
1115 |
+
msg = "Bad embedding. Opposite half-edge is missing."
|
1116 |
+
raise nx.NetworkXException(msg)
|
1117 |
+
|
1118 |
+
# Check planarity
|
1119 |
+
counted_half_edges = set()
|
1120 |
+
for component in nx.connected_components(self):
|
1121 |
+
if len(component) == 1:
|
1122 |
+
# Don't need to check single node component
|
1123 |
+
continue
|
1124 |
+
num_nodes = len(component)
|
1125 |
+
num_half_edges = 0
|
1126 |
+
num_faces = 0
|
1127 |
+
for v in component:
|
1128 |
+
for w in self.neighbors_cw_order(v):
|
1129 |
+
num_half_edges += 1
|
1130 |
+
if (v, w) not in counted_half_edges:
|
1131 |
+
# We encountered a new face
|
1132 |
+
num_faces += 1
|
1133 |
+
# Mark all half-edges belonging to this face
|
1134 |
+
self.traverse_face(v, w, counted_half_edges)
|
1135 |
+
num_edges = num_half_edges // 2 # num_half_edges is even
|
1136 |
+
if num_nodes - num_edges + num_faces != 2:
|
1137 |
+
# The result does not match Euler's formula
|
1138 |
+
msg = "Bad embedding. The graph does not match Euler's formula"
|
1139 |
+
raise nx.NetworkXException(msg)
|
1140 |
+
|
1141 |
+
def add_half_edge_ccw(self, start_node, end_node, reference_neighbor):
|
1142 |
+
"""Adds a half-edge from start_node to end_node.
|
1143 |
+
|
1144 |
+
The half-edge is added counter clockwise next to the existing half-edge
|
1145 |
+
(start_node, reference_neighbor).
|
1146 |
+
|
1147 |
+
Parameters
|
1148 |
+
----------
|
1149 |
+
start_node : node
|
1150 |
+
Start node of inserted edge.
|
1151 |
+
end_node : node
|
1152 |
+
End node of inserted edge.
|
1153 |
+
reference_neighbor: node
|
1154 |
+
End node of reference edge.
|
1155 |
+
|
1156 |
+
Raises
|
1157 |
+
------
|
1158 |
+
NetworkXException
|
1159 |
+
If the reference_neighbor does not exist.
|
1160 |
+
|
1161 |
+
See Also
|
1162 |
+
--------
|
1163 |
+
add_half_edge
|
1164 |
+
add_half_edge_cw
|
1165 |
+
connect_components
|
1166 |
+
|
1167 |
+
"""
|
1168 |
+
self.add_half_edge(start_node, end_node, cw=reference_neighbor)
|
1169 |
+
|
1170 |
+
def add_half_edge_cw(self, start_node, end_node, reference_neighbor):
|
1171 |
+
"""Adds a half-edge from start_node to end_node.
|
1172 |
+
|
1173 |
+
The half-edge is added clockwise next to the existing half-edge
|
1174 |
+
(start_node, reference_neighbor).
|
1175 |
+
|
1176 |
+
Parameters
|
1177 |
+
----------
|
1178 |
+
start_node : node
|
1179 |
+
Start node of inserted edge.
|
1180 |
+
end_node : node
|
1181 |
+
End node of inserted edge.
|
1182 |
+
reference_neighbor: node
|
1183 |
+
End node of reference edge.
|
1184 |
+
|
1185 |
+
Raises
|
1186 |
+
------
|
1187 |
+
NetworkXException
|
1188 |
+
If the reference_neighbor does not exist.
|
1189 |
+
|
1190 |
+
See Also
|
1191 |
+
--------
|
1192 |
+
add_half_edge
|
1193 |
+
add_half_edge_ccw
|
1194 |
+
connect_components
|
1195 |
+
"""
|
1196 |
+
self.add_half_edge(start_node, end_node, ccw=reference_neighbor)
|
1197 |
+
|
1198 |
+
def remove_edge(self, u, v):
|
1199 |
+
"""Remove the edge between u and v.
|
1200 |
+
|
1201 |
+
Parameters
|
1202 |
+
----------
|
1203 |
+
u, v : nodes
|
1204 |
+
        Remove the half-edges (u, v) and (v, u) and update the
        edge ordering around the removed edge.

        Raises
        ------
        NetworkXError
            If there is not an edge between u and v.

        See Also
        --------
        remove_edges_from : remove a collection of edges
        """
        try:
            succs_u = self._succ[u]
            succs_v = self._succ[v]
            uv_cw = succs_u[v]["cw"]
            uv_ccw = succs_u[v]["ccw"]
            vu_cw = succs_v[u]["cw"]
            vu_ccw = succs_v[u]["ccw"]
            del succs_u[v]
            del self._pred[v][u]
            del succs_v[u]
            del self._pred[u][v]
            if v != uv_cw:
                succs_u[uv_cw]["ccw"] = uv_ccw
                succs_u[uv_ccw]["cw"] = uv_cw
            if u != vu_cw:
                succs_v[vu_cw]["ccw"] = vu_ccw
                succs_v[vu_ccw]["cw"] = vu_cw
        except KeyError as err:
            raise nx.NetworkXError(
                f"The edge {u}-{v} is not in the planar embedding."
            ) from err
        nx._clear_cache(self)

    def remove_edges_from(self, ebunch):
        """Remove all edges specified in ebunch.

        Parameters
        ----------
        ebunch: list or container of edge tuples
            Each pair of half-edges between the nodes given in the tuples
            will be removed from the graph. The nodes can be passed as:

                - 2-tuples (u, v) half-edges (u, v) and (v, u).
                - 3-tuples (u, v, k) where k is ignored.

        See Also
        --------
        remove_edge : remove a single edge

        Notes
        -----
        Will fail silently if an edge in ebunch is not in the graph.

        Examples
        --------
        >>> G = nx.path_graph(4)  # or DiGraph, MultiGraph, MultiDiGraph, etc
        >>> ebunch = [(1, 2), (2, 3)]
        >>> G.remove_edges_from(ebunch)
        """
        for e in ebunch:
            u, v = e[:2]  # ignore edge data
            # assuming that the PlanarEmbedding is valid, if the half_edge
            # (u, v) is in the graph, then so is half_edge (v, u)
            if u in self._succ and v in self._succ[u]:
                self.remove_edge(u, v)

    def connect_components(self, v, w):
        """Adds half-edges for (v, w) and (w, v) at some position.

        This method should only be called if v and w are in different
        components, or it might break the embedding.
        This especially means that if `connect_components(v, w)`
        is called it is not allowed to call `connect_components(w, v)`
        afterwards. The neighbor orientations in both directions are
        all set correctly after the first call.

        Parameters
        ----------
        v : node
        w : node

        See Also
        --------
        add_half_edge
        """
        if v in self._succ and self._succ[v]:
            ref = next(reversed(self._succ[v]))
        else:
            ref = None
        self.add_half_edge(v, w, cw=ref)
        if w in self._succ and self._succ[w]:
            ref = next(reversed(self._succ[w]))
        else:
            ref = None
        self.add_half_edge(w, v, cw=ref)

    def add_half_edge_first(self, start_node, end_node):
        """Add a half-edge and set end_node as start_node's leftmost neighbor.

        The new edge is inserted counterclockwise with respect to the current
        leftmost neighbor, if there is one.

        Parameters
        ----------
        start_node : node
        end_node : node

        See Also
        --------
        add_half_edge
        connect_components
        """
        succs = self._succ.get(start_node)
        # the leftmost neighbor is the last entry in the
        # self._succ[start_node] dict
        leftmost_nbr = next(reversed(succs)) if succs else None
        self.add_half_edge(start_node, end_node, cw=leftmost_nbr)

    def next_face_half_edge(self, v, w):
        """Returns the following half-edge left of a face.

        Parameters
        ----------
        v : node
        w : node

        Returns
        -------
        half-edge : tuple
        """
        new_node = self[w][v]["ccw"]
        return w, new_node

    def traverse_face(self, v, w, mark_half_edges=None):
        """Returns nodes on the face that belong to the half-edge (v, w).

        The face that is traversed lies to the right of the half-edge (in an
        orientation where v is below w).

        Optionally it is possible to pass a set to which all encountered half
        edges are added. Before calling this method, this set must not include
        any half-edges that belong to the face.

        Parameters
        ----------
        v : node
            Start node of half-edge.
        w : node
            End node of half-edge.
        mark_half_edges: set, optional
            Set to which all encountered half-edges are added.

        Returns
        -------
        face : list
            A list of nodes that lie on this face.
        """
        if mark_half_edges is None:
            mark_half_edges = set()

        face_nodes = [v]
        mark_half_edges.add((v, w))
        prev_node = v
        cur_node = w
        # Last half-edge is (incoming_node, v)
        incoming_node = self[v][w]["cw"]

        while cur_node != v or prev_node != incoming_node:
            face_nodes.append(cur_node)
            prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node)
            if (prev_node, cur_node) in mark_half_edges:
                raise nx.NetworkXException("Bad planar embedding. Impossible face.")
            mark_half_edges.add((prev_node, cur_node))

        return face_nodes

    def is_directed(self):
        """A valid PlanarEmbedding is undirected.

        All reverse edges are contained, i.e. for every existing
        half-edge (v, w) the half-edge in the opposite direction (w, v) is also
        contained.
        """
        return False

    def copy(self, as_view=False):
        if as_view is True:
            return nx.graphviews.generic_graph_view(self)
        G = self.__class__()
        G.graph.update(self.graph)
        G.add_nodes_from((n, d.copy()) for n, d in self._node.items())
        super(self.__class__, G).add_edges_from(
            (u, v, datadict.copy())
            for u, nbrs in self._adj.items()
            for v, datadict in nbrs.items()
        )
        return G
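# --- Illustrative usage (editor's sketch, not part of the diffed file) ---
# A minimal, hedged example of the half-edge API above: obtain a combinatorial
# embedding from nx.check_planarity, validate it, walk one face, then remove
# an edge. The graph choice and node labels are purely illustrative.
import networkx as nx

is_planar, emb = nx.check_planarity(nx.cycle_graph(4))
assert is_planar
emb.check_structure()            # raises nx.NetworkXException if inconsistent
face = emb.traverse_face(0, 1)   # nodes of the face to the right of half-edge (0, 1)
print(face)
emb.remove_edge(0, 1)            # removes both half-edges (0, 1) and (1, 0)
print(sorted(emb.edges()))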
venv/lib/python3.10/site-packages/networkx/algorithms/polynomials.py
ADDED
@@ -0,0 +1,305 @@
"""Provides algorithms supporting the computation of graph polynomials.

Graph polynomials are polynomial-valued graph invariants that encode a wide
variety of structural information. Examples include the Tutte polynomial,
chromatic polynomial, characteristic polynomial, and matching polynomial. An
extensive treatment is provided in [1]_.

For a simple example, the `~sympy.matrices.matrices.MatrixDeterminant.charpoly`
method can be used to compute the characteristic polynomial from the adjacency
matrix of a graph. Consider the complete graph ``K_4``:

>>> import sympy
>>> x = sympy.Symbol("x")
>>> G = nx.complete_graph(4)
>>> A = nx.adjacency_matrix(G)
>>> M = sympy.SparseMatrix(A.todense())
>>> M.charpoly(x).as_expr()
x**4 - 6*x**2 - 8*x - 3


.. [1] Y. Shi, M. Dehmer, X. Li, I. Gutman,
   "Graph Polynomials"
"""
from collections import deque

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ["tutte_polynomial", "chromatic_polynomial"]


@not_implemented_for("directed")
@nx._dispatchable
def tutte_polynomial(G):
    r"""Returns the Tutte polynomial of `G`

    This function computes the Tutte polynomial via an iterative version of
    the deletion-contraction algorithm.

    The Tutte polynomial `T_G(x, y)` is a fundamental graph polynomial invariant in
    two variables. It encodes a wide array of information related to the
    edge-connectivity of a graph; "Many problems about graphs can be reduced to
    problems of finding and evaluating the Tutte polynomial at certain values" [1]_.
    In fact, every deletion-contraction-expressible feature of a graph is a
    specialization of the Tutte polynomial [2]_ (see Notes for examples).

    There are several equivalent definitions; here are three:

    Def 1 (rank-nullity expansion): For `G` an undirected graph, `n(G)` the
    number of vertices of `G`, `E` the edge set of `G`, `V` the vertex set of
    `G`, and `c(A)` the number of connected components of the graph with vertex
    set `V` and edge set `A` [3]_:

    .. math::

        T_G(x, y) = \sum_{A \in E} (x-1)^{c(A) - c(E)} (y-1)^{c(A) + |A| - n(G)}

    Def 2 (spanning tree expansion): Let `G` be an undirected graph, `T` a spanning
    tree of `G`, and `E` the edge set of `G`. Let `E` have an arbitrary strict
    linear order `L`. Let `B_e` be the unique minimal nonempty edge cut of
    $E \setminus T \cup {e}$. An edge `e` is internally active with respect to
    `T` and `L` if `e` is the least edge in `B_e` according to the linear order
    `L`. The internal activity of `T` (denoted `i(T)`) is the number of edges
    in $E \setminus T$ that are internally active with respect to `T` and `L`.
    Let `P_e` be the unique path in $T \cup {e}$ whose source and target vertex
    are the same. An edge `e` is externally active with respect to `T` and `L`
    if `e` is the least edge in `P_e` according to the linear order `L`. The
    external activity of `T` (denoted `e(T)`) is the number of edges in
    $E \setminus T$ that are externally active with respect to `T` and `L`.
    Then [4]_ [5]_:

    .. math::

        T_G(x, y) = \sum_{T \text{ a spanning tree of } G} x^{i(T)} y^{e(T)}

    Def 3 (deletion-contraction recurrence): For `G` an undirected graph, `G-e`
    the graph obtained from `G` by deleting edge `e`, `G/e` the graph obtained
    from `G` by contracting edge `e`, `k(G)` the number of cut-edges of `G`,
    and `l(G)` the number of self-loops of `G`:

    .. math::
        T_G(x, y) = \begin{cases}
            x^{k(G)} y^{l(G)}, & \text{if all edges are cut-edges or self-loops} \\
            T_{G-e}(x, y) + T_{G/e}(x, y), & \text{otherwise, for an arbitrary edge $e$ not a cut-edge or loop}
        \end{cases}

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    instance of `sympy.core.add.Add`
        A Sympy expression representing the Tutte polynomial for `G`.

    Examples
    --------
    >>> C = nx.cycle_graph(5)
    >>> nx.tutte_polynomial(C)
    x**4 + x**3 + x**2 + x + y

    >>> D = nx.diamond_graph()
    >>> nx.tutte_polynomial(D)
    x**3 + 2*x**2 + 2*x*y + x + y**2 + y

    Notes
    -----
    Some specializations of the Tutte polynomial:

    - `T_G(1, 1)` counts the number of spanning trees of `G`
    - `T_G(1, 2)` counts the number of connected spanning subgraphs of `G`
    - `T_G(2, 1)` counts the number of spanning forests in `G`
    - `T_G(0, 2)` counts the number of strong orientations of `G`
    - `T_G(2, 0)` counts the number of acyclic orientations of `G`

    Edge contraction is defined and deletion-contraction is introduced in [6]_.
    Combinatorial meaning of the coefficients is introduced in [7]_.
    Universality, properties, and applications are discussed in [8]_.

    Practically, up-front computation of the Tutte polynomial may be useful when
    users wish to repeatedly calculate edge-connectivity-related information
    about one or more graphs.

    References
    ----------
    .. [1] M. Brandt,
       "The Tutte Polynomial."
       Talking About Combinatorial Objects Seminar, 2015
       https://math.berkeley.edu/~brandtm/talks/tutte.pdf
    .. [2] A. Björklund, T. Husfeldt, P. Kaski, M. Koivisto,
       "Computing the Tutte polynomial in vertex-exponential time"
       49th Annual IEEE Symposium on Foundations of Computer Science, 2008
       https://ieeexplore.ieee.org/abstract/document/4691000
    .. [3] Y. Shi, M. Dehmer, X. Li, I. Gutman,
       "Graph Polynomials," p. 14
    .. [4] Y. Shi, M. Dehmer, X. Li, I. Gutman,
       "Graph Polynomials," p. 46
    .. [5] A. Nešetril, J. Goodall,
       "Graph invariants, homomorphisms, and the Tutte polynomial"
       https://iuuk.mff.cuni.cz/~andrew/Tutte.pdf
    .. [6] D. B. West,
       "Introduction to Graph Theory," p. 84
    .. [7] G. Coutinho,
       "A brief introduction to the Tutte polynomial"
       Structural Analysis of Complex Networks, 2011
       https://homepages.dcc.ufmg.br/~gabriel/seminars/coutinho_tuttepolynomial_seminar.pdf
    .. [8] J. A. Ellis-Monaghan, C. Merino,
       "Graph polynomials and their applications I: The Tutte polynomial"
       Structural Analysis of Complex Networks, 2011
       https://arxiv.org/pdf/0803.3079.pdf
    """
    import sympy

    x = sympy.Symbol("x")
    y = sympy.Symbol("y")
    stack = deque()
    stack.append(nx.MultiGraph(G))

    polynomial = 0
    while stack:
        G = stack.pop()
        bridges = set(nx.bridges(G))

        e = None
        for i in G.edges:
            if (i[0], i[1]) not in bridges and i[0] != i[1]:
                e = i
                break
        if not e:
            loops = list(nx.selfloop_edges(G, keys=True))
            polynomial += x ** len(bridges) * y ** len(loops)
        else:
            # deletion-contraction
            C = nx.contracted_edge(G, e, self_loops=True)
            C.remove_edge(e[0], e[0])
            G.remove_edge(*e)
            stack.append(G)
            stack.append(C)
    return sympy.simplify(polynomial)


@not_implemented_for("directed")
@nx._dispatchable
def chromatic_polynomial(G):
    r"""Returns the chromatic polynomial of `G`

    This function computes the chromatic polynomial via an iterative version of
    the deletion-contraction algorithm.

    The chromatic polynomial `X_G(x)` is a fundamental graph polynomial
    invariant in one variable. Evaluating `X_G(k)` for a natural number `k`
    enumerates the proper k-colorings of `G`.

    There are several equivalent definitions; here are three:

    Def 1 (explicit formula):
    For `G` an undirected graph, `c(G)` the number of connected components of
    `G`, `E` the edge set of `G`, and `G(S)` the spanning subgraph of `G` with
    edge set `S` [1]_:

    .. math::

        X_G(x) = \sum_{S \subseteq E} (-1)^{|S|} x^{c(G(S))}


    Def 2 (interpolating polynomial):
    For `G` an undirected graph, `n(G)` the number of vertices of `G`, `k_0 = 0`,
    and `k_i` the number of distinct ways to color the vertices of `G` with `i`
    unique colors (for `i` a natural number at most `n(G)`), `X_G(x)` is the
    unique Lagrange interpolating polynomial of degree `n(G)` through the points
    `(0, k_0), (1, k_1), \dots, (n(G), k_{n(G)})` [2]_.


    Def 3 (chromatic recurrence):
    For `G` an undirected graph, `G-e` the graph obtained from `G` by deleting
    edge `e`, `G/e` the graph obtained from `G` by contracting edge `e`, `n(G)`
    the number of vertices of `G`, and `e(G)` the number of edges of `G` [3]_:

    .. math::
        X_G(x) = \begin{cases}
            x^{n(G)}, & \text{if $e(G)=0$} \\
            X_{G-e}(x) - X_{G/e}(x), & \text{otherwise, for an arbitrary edge $e$}
        \end{cases}

    This formulation is also known as the Fundamental Reduction Theorem [4]_.


    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    instance of `sympy.core.add.Add`
        A Sympy expression representing the chromatic polynomial for `G`.

    Examples
    --------
    >>> C = nx.cycle_graph(5)
    >>> nx.chromatic_polynomial(C)
    x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x

    >>> G = nx.complete_graph(4)
    >>> nx.chromatic_polynomial(G)
    x**4 - 6*x**3 + 11*x**2 - 6*x

    Notes
    -----
    Interpretation of the coefficients is discussed in [5]_. Several special
    cases are listed in [2]_.

    The chromatic polynomial is a specialization of the Tutte polynomial; in
    particular, ``X_G(x) = T_G(x, 0)`` [6]_.

    The chromatic polynomial may take negative arguments, though evaluations
    may not have chromatic interpretations. For instance, ``X_G(-1)`` enumerates
    the acyclic orientations of `G` [7]_.

    References
    ----------
    .. [1] D. B. West,
       "Introduction to Graph Theory," p. 222
    .. [2] E. W. Weisstein
       "Chromatic Polynomial"
       MathWorld--A Wolfram Web Resource
       https://mathworld.wolfram.com/ChromaticPolynomial.html
    .. [3] D. B. West,
       "Introduction to Graph Theory," p. 221
    .. [4] J. Zhang, J. Goodall,
       "An Introduction to Chromatic Polynomials"
       https://math.mit.edu/~apost/courses/18.204_2018/Julie_Zhang_paper.pdf
    .. [5] R. C. Read,
       "An Introduction to Chromatic Polynomials"
       Journal of Combinatorial Theory, 1968
       https://math.berkeley.edu/~mrklug/ReadChromatic.pdf
    .. [6] W. T. Tutte,
       "Graph-polynomials"
       Advances in Applied Mathematics, 2004
       https://www.sciencedirect.com/science/article/pii/S0196885803000411
    .. [7] R. P. Stanley,
       "Acyclic orientations of graphs"
       Discrete Mathematics, 2006
       https://math.mit.edu/~rstan/pubs/pubfiles/18.pdf
    """
    import sympy

    x = sympy.Symbol("x")
    stack = deque()
    stack.append(nx.MultiGraph(G, contraction_idx=0))

    polynomial = 0
    while stack:
        G = stack.pop()
        edges = list(G.edges)
        if not edges:
            polynomial += (-1) ** G.graph["contraction_idx"] * x ** len(G)
        else:
            e = edges[0]
            C = nx.contracted_edge(G, e, self_loops=True)
            C.graph["contraction_idx"] = G.graph["contraction_idx"] + 1
            C.remove_edge(e[0], e[0])
            G.remove_edge(*e)
            stack.append(G)
            stack.append(C)
    return polynomial
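# --- Illustrative sanity check (editor's sketch, not part of the diffed file) ---
# Assumes sympy is installed. Per the Notes section above, T_G(1, 1) should
# equal the number of spanning trees, so compare the evaluated Tutte polynomial
# with a direct enumeration; X_G(k) counts proper k-colorings.
import sympy
import networkx as nx

G = nx.diamond_graph()
x, y = sympy.symbols("x y")

T = nx.tutte_polynomial(G)
print(T.subs({x: 1, y: 1}))                        # number of spanning trees (8)
print(sum(1 for _ in nx.SpanningTreeIterator(G)))  # direct count for comparison

X = nx.chromatic_polynomial(G)
print([X.subs(x, k) for k in range(4)])            # proper k-colorings for k = 0..3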
venv/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py
ADDED
@@ -0,0 +1,97 @@
"""Algorithms to calculate reciprocity in a directed graph."""
import networkx as nx
from networkx import NetworkXError

from ..utils import not_implemented_for

__all__ = ["reciprocity", "overall_reciprocity"]


@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def reciprocity(G, nodes=None):
    r"""Compute the reciprocity in a directed graph.

    The reciprocity of a directed graph is defined as the ratio
    of the number of edges pointing in both directions to the total
    number of edges in the graph.
    Formally, $r = |{(u,v) \in G|(v,u) \in G}| / |{(u,v) \in G}|$.

    The reciprocity of a single node u is defined similarly,
    it is the ratio of the number of edges in both directions to
    the total number of edges attached to node u.

    Parameters
    ----------
    G : graph
       A networkx directed graph
    nodes : container of nodes, optional (default=whole graph)
       Compute reciprocity for nodes in this container.

    Returns
    -------
    out : dictionary
       Reciprocity keyed by node label.

    Notes
    -----
    The reciprocity is not defined for isolated nodes.
    In such cases this function will return None.

    """
    # If `nodes` is not specified, calculate the reciprocity of the graph.
    if nodes is None:
        return overall_reciprocity(G)

    # If `nodes` represents a single node in the graph, return only its
    # reciprocity.
    if nodes in G:
        reciprocity = next(_reciprocity_iter(G, nodes))[1]
        if reciprocity is None:
            raise NetworkXError("Not defined for isolated nodes.")
        else:
            return reciprocity

    # Otherwise, `nodes` represents an iterable of nodes, so return a
    # dictionary mapping node to its reciprocity.
    return dict(_reciprocity_iter(G, nodes))


def _reciprocity_iter(G, nodes):
    """Return an iterator of (node, reciprocity)."""
    n = G.nbunch_iter(nodes)
    for node in n:
        pred = set(G.predecessors(node))
        succ = set(G.successors(node))
        overlap = pred & succ
        n_total = len(pred) + len(succ)

        # Reciprocity is not defined for isolated nodes.
        # Return None.
        if n_total == 0:
            yield (node, None)
        else:
            reciprocity = 2 * len(overlap) / n_total
            yield (node, reciprocity)


@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def overall_reciprocity(G):
    """Compute the reciprocity for the whole graph.

    See the doc of reciprocity for the definition.

    Parameters
    ----------
    G : graph
       A networkx graph

    """
    n_all_edge = G.number_of_edges()
    n_overlap_edge = (n_all_edge - G.to_undirected().number_of_edges()) * 2

    if n_all_edge == 0:
        raise NetworkXError("Not defined for empty graphs")

    return n_overlap_edge / n_all_edge
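# --- Illustrative usage (editor's sketch, not part of the diffed file) ---
# Reciprocity of a small digraph in which one of the three edges is
# reciprocated; the graph and node labels are illustrative.
import networkx as nx

G = nx.DiGraph([(0, 1), (1, 0), (1, 2)])
print(nx.overall_reciprocity(G))           # 2 reciprocated edges / 3 edges = 0.666...
print(nx.reciprocity(G, nodes=[0, 1, 2]))  # per-node values; node 2 gets 0.0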
venv/lib/python3.10/site-packages/networkx/algorithms/regular.py
ADDED
@@ -0,0 +1,214 @@
"""Functions for computing and verifying regular graphs."""
import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ["is_regular", "is_k_regular", "k_factor"]


@nx._dispatchable
def is_regular(G):
    """Determines whether the graph ``G`` is a regular graph.

    A regular graph is a graph where each vertex has the same degree. A
    regular digraph is a graph where the indegree and outdegree of each
    vertex are equal.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the given graph or digraph is regular.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_regular(G)
    True

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    n1 = nx.utils.arbitrary_element(G)
    if not G.is_directed():
        d1 = G.degree(n1)
        return all(d1 == d for _, d in G.degree)
    else:
        d_in = G.in_degree(n1)
        in_regular = all(d_in == d for _, d in G.in_degree)
        d_out = G.out_degree(n1)
        out_regular = all(d_out == d for _, d in G.out_degree)
        return in_regular and out_regular


@not_implemented_for("directed")
@nx._dispatchable
def is_k_regular(G, k):
    """Determines whether the graph ``G`` is a k-regular graph.

    A k-regular graph is a graph where each vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the given graph is k-regular.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_k_regular(G, k=3)
    False

    """
    return all(d == k for n, d in G.degree)


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True)
def k_factor(G, k, matching_weight="weight"):
    """Compute a k-factor of G

    A k-factor of a graph is a spanning k-regular subgraph.
    A spanning k-regular subgraph of G is a subgraph that contains
    each vertex of G and a subset of the edges of G such that each
    vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    matching_weight: string, optional (default='weight')
       Edge data key corresponding to the edge weight.
       Used for finding the max-weighted perfect matching.
       If key not found, uses 1 as weight.

    Returns
    -------
    G2 : NetworkX graph
        A k-factor of G

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> G2 = nx.k_factor(G, k=1)
    >>> G2.edges()
    EdgeView([(1, 2), (3, 4)])

    References
    ----------
    .. [1] "An algorithm for computing simple k-factors.",
       Meijer, Henk, Yurai Núñez-Rodríguez, and David Rappaport,
       Information processing letters, 2009.
    """

    from networkx.algorithms.matching import is_perfect_matching, max_weight_matching

    class LargeKGadget:
        def __init__(self, k, degree, node, g):
            self.original = node
            self.g = g
            self.k = k
            self.degree = degree

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.core_vertices = [(node, x + degree) for x in range(degree - k)]

        def replace_node(self):
            adj_view = self.g[self.original]
            neighbors = list(adj_view.keys())
            edge_attrs = list(adj_view.values())
            for outer, neighbor, edge_attrs in zip(
                self.outer_vertices, neighbors, edge_attrs
            ):
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for outer in self.outer_vertices:
                    self.g.add_edge(core, outer)
            self.g.remove_node(self.original)

        def restore_node(self):
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in list(adj_view.items()):
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            g.remove_nodes_from(self.outer_vertices)
            g.remove_nodes_from(self.core_vertices)

    class SmallKGadget:
        def __init__(self, k, degree, node, g):
            self.original = node
            self.k = k
            self.degree = degree
            self.g = g

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.inner_vertices = [(node, x + degree) for x in range(degree)]
            self.core_vertices = [(node, x + 2 * degree) for x in range(k)]

        def replace_node(self):
            adj_view = self.g[self.original]
            for outer, inner, (neighbor, edge_attrs) in zip(
                self.outer_vertices, self.inner_vertices, list(adj_view.items())
            ):
                self.g.add_edge(outer, inner)
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for inner in self.inner_vertices:
                    self.g.add_edge(core, inner)
            self.g.remove_node(self.original)

        def restore_node(self):
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in adj_view.items():
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            self.g.remove_nodes_from(self.outer_vertices)
            self.g.remove_nodes_from(self.inner_vertices)
            self.g.remove_nodes_from(self.core_vertices)

    # Step 1
    if any(d < k for _, d in G.degree):
        raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k")
    g = G.copy()

    # Step 2
    gadgets = []
    for node, degree in list(g.degree):
        if k < degree / 2.0:
            gadget = SmallKGadget(k, degree, node, g)
        else:
            gadget = LargeKGadget(k, degree, node, g)
        gadget.replace_node()
        gadgets.append(gadget)

    # Step 3
    matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight)

    # Step 4
    if not is_perfect_matching(g, matching):
        raise nx.NetworkXUnfeasible(
            "Cannot find k-factor because no perfect matching exists"
        )

    for edge in g.edges():
        if edge not in matching and (edge[1], edge[0]) not in matching:
            g.remove_edge(edge[0], edge[1])

    for gadget in gadgets:
        gadget.restore_node()

    return g
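# --- Illustrative usage (editor's sketch, not part of the diffed file) ---
# A 6-cycle is 2-regular; a 1-factor of it is a perfect matching produced by
# the gadget construction above. Graph choice is illustrative.
import networkx as nx

C6 = nx.cycle_graph(6)
print(nx.is_regular(C6), nx.is_k_regular(C6, k=2))  # True True

F = nx.k_factor(C6, k=1)
print(sorted(F.edges()))        # three disjoint edges covering all six nodes
print(nx.is_k_regular(F, k=1))  # True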
venv/lib/python3.10/site-packages/networkx/algorithms/similarity.py
ADDED
@@ -0,0 +1,1777 @@
1 |
+
""" Functions measuring similarity using graph edit distance.
|
2 |
+
|
3 |
+
The graph edit distance is the number of edge/node changes needed
|
4 |
+
to make two graphs isomorphic.
|
5 |
+
|
6 |
+
The default algorithm/implementation is sub-optimal for some graphs.
|
7 |
+
The problem of finding the exact Graph Edit Distance (GED) is NP-hard
|
8 |
+
so it is often slow. If the simple interface `graph_edit_distance`
|
9 |
+
takes too long for your graph, try `optimize_graph_edit_distance`
|
10 |
+
and/or `optimize_edit_paths`.
|
11 |
+
|
12 |
+
At the same time, I encourage capable people to investigate
|
13 |
+
alternative GED algorithms, in order to improve the choices available.
|
14 |
+
"""
|
15 |
+
|
16 |
+
import math
|
17 |
+
import time
|
18 |
+
import warnings
|
19 |
+
from dataclasses import dataclass
|
20 |
+
from itertools import product
|
21 |
+
|
22 |
+
import networkx as nx
|
23 |
+
from networkx.utils import np_random_state
|
24 |
+
|
25 |
+
__all__ = [
|
26 |
+
"graph_edit_distance",
|
27 |
+
"optimal_edit_paths",
|
28 |
+
"optimize_graph_edit_distance",
|
29 |
+
"optimize_edit_paths",
|
30 |
+
"simrank_similarity",
|
31 |
+
"panther_similarity",
|
32 |
+
"generate_random_paths",
|
33 |
+
]
|
34 |
+
|
35 |
+
|
36 |
+
def debug_print(*args, **kwargs):
|
37 |
+
print(*args, **kwargs)
|
38 |
+
|
39 |
+
|
40 |
+
@nx._dispatchable(
|
41 |
+
graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True
|
42 |
+
)
|
43 |
+
def graph_edit_distance(
|
44 |
+
G1,
|
45 |
+
G2,
|
46 |
+
node_match=None,
|
47 |
+
edge_match=None,
|
48 |
+
node_subst_cost=None,
|
49 |
+
node_del_cost=None,
|
50 |
+
node_ins_cost=None,
|
51 |
+
edge_subst_cost=None,
|
52 |
+
edge_del_cost=None,
|
53 |
+
edge_ins_cost=None,
|
54 |
+
roots=None,
|
55 |
+
upper_bound=None,
|
56 |
+
timeout=None,
|
57 |
+
):
|
58 |
+
"""Returns GED (graph edit distance) between graphs G1 and G2.
|
59 |
+
|
60 |
+
Graph edit distance is a graph similarity measure analogous to
|
61 |
+
Levenshtein distance for strings. It is defined as minimum cost
|
62 |
+
of edit path (sequence of node and edge edit operations)
|
63 |
+
transforming graph G1 to graph isomorphic to G2.
|
64 |
+
|
65 |
+
Parameters
|
66 |
+
----------
|
67 |
+
G1, G2: graphs
|
68 |
+
The two graphs G1 and G2 must be of the same type.
|
69 |
+
|
70 |
+
node_match : callable
|
71 |
+
A function that returns True if node n1 in G1 and n2 in G2
|
72 |
+
should be considered equal during matching.
|
73 |
+
|
74 |
+
The function will be called like
|
75 |
+
|
76 |
+
node_match(G1.nodes[n1], G2.nodes[n2]).
|
77 |
+
|
78 |
+
That is, the function will receive the node attribute
|
79 |
+
dictionaries for n1 and n2 as inputs.
|
80 |
+
|
81 |
+
Ignored if node_subst_cost is specified. If neither
|
82 |
+
node_match nor node_subst_cost are specified then node
|
83 |
+
attributes are not considered.
|
84 |
+
|
85 |
+
edge_match : callable
|
86 |
+
A function that returns True if the edge attribute dictionaries
|
87 |
+
for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
|
88 |
+
be considered equal during matching.
|
89 |
+
|
90 |
+
The function will be called like
|
91 |
+
|
92 |
+
edge_match(G1[u1][v1], G2[u2][v2]).
|
93 |
+
|
94 |
+
That is, the function will receive the edge attribute
|
95 |
+
dictionaries of the edges under consideration.
|
96 |
+
|
97 |
+
Ignored if edge_subst_cost is specified. If neither
|
98 |
+
edge_match nor edge_subst_cost are specified then edge
|
99 |
+
attributes are not considered.
|
100 |
+
|
101 |
+
node_subst_cost, node_del_cost, node_ins_cost : callable
|
102 |
+
Functions that return the costs of node substitution, node
|
103 |
+
deletion, and node insertion, respectively.
|
104 |
+
|
105 |
+
The functions will be called like
|
106 |
+
|
107 |
+
node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
|
108 |
+
node_del_cost(G1.nodes[n1]),
|
109 |
+
node_ins_cost(G2.nodes[n2]).
|
110 |
+
|
111 |
+
That is, the functions will receive the node attribute
|
112 |
+
dictionaries as inputs. The functions are expected to return
|
113 |
+
positive numeric values.
|
114 |
+
|
115 |
+
Function node_subst_cost overrides node_match if specified.
|
116 |
+
If neither node_match nor node_subst_cost are specified then
|
117 |
+
default node substitution cost of 0 is used (node attributes
|
118 |
+
are not considered during matching).
|
119 |
+
|
120 |
+
If node_del_cost is not specified then default node deletion
|
121 |
+
cost of 1 is used. If node_ins_cost is not specified then
|
122 |
+
default node insertion cost of 1 is used.
|
123 |
+
|
124 |
+
edge_subst_cost, edge_del_cost, edge_ins_cost : callable
|
125 |
+
Functions that return the costs of edge substitution, edge
|
126 |
+
deletion, and edge insertion, respectively.
|
127 |
+
|
128 |
+
The functions will be called like
|
129 |
+
|
130 |
+
edge_subst_cost(G1[u1][v1], G2[u2][v2]),
|
131 |
+
edge_del_cost(G1[u1][v1]),
|
132 |
+
edge_ins_cost(G2[u2][v2]).
|
133 |
+
|
134 |
+
That is, the functions will receive the edge attribute
|
135 |
+
dictionaries as inputs. The functions are expected to return
|
136 |
+
positive numeric values.
|
137 |
+
|
138 |
+
Function edge_subst_cost overrides edge_match if specified.
|
139 |
+
If neither edge_match nor edge_subst_cost are specified then
|
140 |
+
default edge substitution cost of 0 is used (edge attributes
|
141 |
+
are not considered during matching).
|
142 |
+
|
143 |
+
If edge_del_cost is not specified then default edge deletion
|
144 |
+
cost of 1 is used. If edge_ins_cost is not specified then
|
145 |
+
default edge insertion cost of 1 is used.
|
146 |
+
|
147 |
+
roots : 2-tuple
|
148 |
+
Tuple where first element is a node in G1 and the second
|
149 |
+
is a node in G2.
|
150 |
+
These nodes are forced to be matched in the comparison to
|
151 |
+
allow comparison between rooted graphs.
|
152 |
+
|
153 |
+
upper_bound : numeric
|
154 |
+
Maximum edit distance to consider. Return None if no edit
|
155 |
+
distance under or equal to upper_bound exists.
|
156 |
+
|
157 |
+
timeout : numeric
|
158 |
+
Maximum number of seconds to execute.
|
159 |
+
After timeout is met, the current best GED is returned.
|
160 |
+
|
161 |
+
Examples
|
162 |
+
--------
|
163 |
+
>>> G1 = nx.cycle_graph(6)
|
164 |
+
>>> G2 = nx.wheel_graph(7)
|
165 |
+
>>> nx.graph_edit_distance(G1, G2)
|
166 |
+
7.0
|
167 |
+
|
168 |
+
>>> G1 = nx.star_graph(5)
|
169 |
+
>>> G2 = nx.star_graph(5)
|
170 |
+
>>> nx.graph_edit_distance(G1, G2, roots=(0, 0))
|
171 |
+
0.0
|
172 |
+
>>> nx.graph_edit_distance(G1, G2, roots=(1, 0))
|
173 |
+
8.0
|
174 |
+
|
175 |
+
See Also
|
176 |
+
--------
|
177 |
+
optimal_edit_paths, optimize_graph_edit_distance,
|
178 |
+
|
179 |
+
is_isomorphic: test for graph edit distance of 0
|
180 |
+
|
181 |
+
References
|
182 |
+
----------
|
183 |
+
.. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
|
184 |
+
Martineau. An Exact Graph Edit Distance Algorithm for Solving
|
185 |
+
Pattern Recognition Problems. 4th International Conference on
|
186 |
+
Pattern Recognition Applications and Methods 2015, Jan 2015,
|
187 |
+
Lisbon, Portugal. 2015,
|
188 |
+
<10.5220/0005209202710278>. <hal-01168816>
|
189 |
+
https://hal.archives-ouvertes.fr/hal-01168816
|
190 |
+
|
191 |
+
"""
|
192 |
+
bestcost = None
|
193 |
+
for _, _, cost in optimize_edit_paths(
|
194 |
+
G1,
|
195 |
+
G2,
|
196 |
+
node_match,
|
197 |
+
edge_match,
|
198 |
+
node_subst_cost,
|
199 |
+
node_del_cost,
|
200 |
+
node_ins_cost,
|
201 |
+
edge_subst_cost,
|
202 |
+
edge_del_cost,
|
203 |
+
edge_ins_cost,
|
204 |
+
upper_bound,
|
205 |
+
True,
|
206 |
+
roots,
|
207 |
+
timeout,
|
208 |
+
):
|
209 |
+
# assert bestcost is None or cost < bestcost
|
210 |
+
bestcost = cost
|
211 |
+
return bestcost
|
212 |
+
|
213 |
+
|
214 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
|
215 |
+
def optimal_edit_paths(
|
216 |
+
G1,
|
217 |
+
G2,
|
218 |
+
node_match=None,
|
219 |
+
edge_match=None,
|
220 |
+
node_subst_cost=None,
|
221 |
+
node_del_cost=None,
|
222 |
+
node_ins_cost=None,
|
223 |
+
edge_subst_cost=None,
|
224 |
+
edge_del_cost=None,
|
225 |
+
edge_ins_cost=None,
|
226 |
+
upper_bound=None,
|
227 |
+
):
|
228 |
+
"""Returns all minimum-cost edit paths transforming G1 to G2.
|
229 |
+
|
230 |
+
Graph edit path is a sequence of node and edge edit operations
|
231 |
+
transforming graph G1 to graph isomorphic to G2. Edit operations
|
232 |
+
include substitutions, deletions, and insertions.
|
233 |
+
|
234 |
+
Parameters
|
235 |
+
----------
|
236 |
+
G1, G2: graphs
|
237 |
+
The two graphs G1 and G2 must be of the same type.
|
238 |
+
|
239 |
+
node_match : callable
|
240 |
+
A function that returns True if node n1 in G1 and n2 in G2
|
241 |
+
should be considered equal during matching.
|
242 |
+
|
243 |
+
The function will be called like
|
244 |
+
|
245 |
+
node_match(G1.nodes[n1], G2.nodes[n2]).
|
246 |
+
|
247 |
+
That is, the function will receive the node attribute
|
248 |
+
dictionaries for n1 and n2 as inputs.
|
249 |
+
|
250 |
+
Ignored if node_subst_cost is specified. If neither
|
251 |
+
node_match nor node_subst_cost are specified then node
|
252 |
+
attributes are not considered.
|
253 |
+
|
254 |
+
edge_match : callable
|
255 |
+
A function that returns True if the edge attribute dictionaries
|
256 |
+
for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
|
257 |
+
be considered equal during matching.
|
258 |
+
|
259 |
+
The function will be called like
|
260 |
+
|
261 |
+
edge_match(G1[u1][v1], G2[u2][v2]).
|
262 |
+
|
263 |
+
That is, the function will receive the edge attribute
|
264 |
+
dictionaries of the edges under consideration.
|
265 |
+
|
266 |
+
Ignored if edge_subst_cost is specified. If neither
|
267 |
+
edge_match nor edge_subst_cost are specified then edge
|
268 |
+
attributes are not considered.
|
269 |
+
|
270 |
+
node_subst_cost, node_del_cost, node_ins_cost : callable
|
271 |
+
Functions that return the costs of node substitution, node
|
272 |
+
deletion, and node insertion, respectively.
|
273 |
+
|
274 |
+
The functions will be called like
|
275 |
+
|
276 |
+
node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
|
277 |
+
node_del_cost(G1.nodes[n1]),
|
278 |
+
node_ins_cost(G2.nodes[n2]).
|
279 |
+
|
280 |
+
That is, the functions will receive the node attribute
|
281 |
+
dictionaries as inputs. The functions are expected to return
|
282 |
+
positive numeric values.
|
283 |
+
|
284 |
+
Function node_subst_cost overrides node_match if specified.
|
285 |
+
If neither node_match nor node_subst_cost are specified then
|
286 |
+
default node substitution cost of 0 is used (node attributes
|
287 |
+
are not considered during matching).
|
288 |
+
|
289 |
+
If node_del_cost is not specified then default node deletion
|
290 |
+
cost of 1 is used. If node_ins_cost is not specified then
|
291 |
+
default node insertion cost of 1 is used.
|
292 |
+
|
293 |
+
edge_subst_cost, edge_del_cost, edge_ins_cost : callable
|
294 |
+
Functions that return the costs of edge substitution, edge
|
295 |
+
deletion, and edge insertion, respectively.
|
296 |
+
|
297 |
+
The functions will be called like
|
298 |
+
|
299 |
+
edge_subst_cost(G1[u1][v1], G2[u2][v2]),
|
300 |
+
edge_del_cost(G1[u1][v1]),
|
301 |
+
edge_ins_cost(G2[u2][v2]).
|
302 |
+
|
303 |
+
That is, the functions will receive the edge attribute
|
304 |
+
dictionaries as inputs. The functions are expected to return
|
305 |
+
positive numeric values.
|
306 |
+
|
307 |
+
Function edge_subst_cost overrides edge_match if specified.
|
308 |
+
If neither edge_match nor edge_subst_cost are specified then
|
309 |
+
default edge substitution cost of 0 is used (edge attributes
|
310 |
+
are not considered during matching).
|
311 |
+
|
312 |
+
If edge_del_cost is not specified then default edge deletion
|
313 |
+
cost of 1 is used. If edge_ins_cost is not specified then
|
314 |
+
default edge insertion cost of 1 is used.
|
315 |
+
|
316 |
+
upper_bound : numeric
|
317 |
+
Maximum edit distance to consider.
|
318 |
+
|
319 |
+
Returns
|
320 |
+
-------
|
321 |
+
edit_paths : list of tuples (node_edit_path, edge_edit_path)
|
322 |
+
node_edit_path : list of tuples (u, v)
|
323 |
+
edge_edit_path : list of tuples ((u1, v1), (u2, v2))
|
324 |
+
|
325 |
+
cost : numeric
|
326 |
+
Optimal edit path cost (graph edit distance). When the cost
|
327 |
+
is zero, it indicates that `G1` and `G2` are isomorphic.
|
328 |
+
|
329 |
+
Examples
|
330 |
+
--------
|
331 |
+
>>> G1 = nx.cycle_graph(4)
|
332 |
+
>>> G2 = nx.wheel_graph(5)
|
333 |
+
>>> paths, cost = nx.optimal_edit_paths(G1, G2)
|
334 |
+
>>> len(paths)
|
335 |
+
40
|
336 |
+
>>> cost
|
337 |
+
5.0
|
338 |
+
|
339 |
+
Notes
|
340 |
+
-----
|
341 |
+
To transform `G1` into a graph isomorphic to `G2`, apply the node
|
342 |
+
and edge edits in the returned ``edit_paths``.
|
343 |
+
In the case of isomorphic graphs, the cost is zero, and the paths
|
344 |
+
represent different isomorphic mappings (isomorphisms). That is, the
|
345 |
+
edits involve renaming nodes and edges to match the structure of `G2`.
|
346 |
+
|
347 |
+
See Also
|
348 |
+
--------
|
349 |
+
graph_edit_distance, optimize_edit_paths
|
350 |
+
|
351 |
+
References
|
352 |
+
----------
|
353 |
+
.. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
|
354 |
+
Martineau. An Exact Graph Edit Distance Algorithm for Solving
|
355 |
+
Pattern Recognition Problems. 4th International Conference on
|
356 |
+
Pattern Recognition Applications and Methods 2015, Jan 2015,
|
357 |
+
Lisbon, Portugal. 2015,
|
358 |
+
<10.5220/0005209202710278>. <hal-01168816>
|
359 |
+
https://hal.archives-ouvertes.fr/hal-01168816
|
360 |
+
|
361 |
+
"""
|
362 |
+
paths = []
|
363 |
+
bestcost = None
|
364 |
+
for vertex_path, edge_path, cost in optimize_edit_paths(
|
365 |
+
G1,
|
366 |
+
G2,
|
367 |
+
node_match,
|
368 |
+
edge_match,
|
369 |
+
node_subst_cost,
|
370 |
+
node_del_cost,
|
371 |
+
node_ins_cost,
|
372 |
+
edge_subst_cost,
|
373 |
+
edge_del_cost,
|
374 |
+
edge_ins_cost,
|
375 |
+
upper_bound,
|
376 |
+
False,
|
377 |
+
):
|
378 |
+
# assert bestcost is None or cost <= bestcost
|
379 |
+
if bestcost is not None and cost < bestcost:
|
380 |
+
paths = []
|
381 |
+
paths.append((vertex_path, edge_path))
|
382 |
+
bestcost = cost
|
383 |
+
return paths, bestcost
|
384 |
+
|
385 |
+
|
386 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
|
387 |
+
def optimize_graph_edit_distance(
|
388 |
+
G1,
|
389 |
+
G2,
|
390 |
+
node_match=None,
|
391 |
+
edge_match=None,
|
392 |
+
node_subst_cost=None,
|
393 |
+
node_del_cost=None,
|
394 |
+
node_ins_cost=None,
|
395 |
+
edge_subst_cost=None,
|
396 |
+
edge_del_cost=None,
|
397 |
+
edge_ins_cost=None,
|
398 |
+
upper_bound=None,
|
399 |
+
):
|
400 |
+
"""Returns consecutive approximations of GED (graph edit distance)
|
401 |
+
between graphs G1 and G2.
|
402 |
+
|
403 |
+
Graph edit distance is a graph similarity measure analogous to
|
404 |
+
Levenshtein distance for strings. It is defined as minimum cost
|
405 |
+
of edit path (sequence of node and edge edit operations)
|
406 |
+
transforming graph G1 to graph isomorphic to G2.
|
407 |
+
|
408 |
+
Parameters
|
409 |
+
----------
|
410 |
+
G1, G2: graphs
|
411 |
+
The two graphs G1 and G2 must be of the same type.
|
412 |
+
|
413 |
+
node_match : callable
|
414 |
+
A function that returns True if node n1 in G1 and n2 in G2
|
415 |
+
should be considered equal during matching.
|
416 |
+
|
417 |
+
The function will be called like
|
418 |
+
|
419 |
+
node_match(G1.nodes[n1], G2.nodes[n2]).
|
420 |
+
|
421 |
+
That is, the function will receive the node attribute
|
422 |
+
dictionaries for n1 and n2 as inputs.
|
423 |
+
|
424 |
+
Ignored if node_subst_cost is specified. If neither
|
425 |
+
node_match nor node_subst_cost are specified then node
|
426 |
+
attributes are not considered.
|
427 |
+
|
428 |
+
edge_match : callable
|
429 |
+
A function that returns True if the edge attribute dictionaries
|
430 |
+
for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
|
431 |
+
be considered equal during matching.
|
432 |
+
|
433 |
+
The function will be called like
|
434 |
+
|
435 |
+
edge_match(G1[u1][v1], G2[u2][v2]).
|
436 |
+
|
437 |
+
That is, the function will receive the edge attribute
|
438 |
+
dictionaries of the edges under consideration.
|
439 |
+
|
440 |
+
Ignored if edge_subst_cost is specified. If neither
|
441 |
+
edge_match nor edge_subst_cost are specified then edge
|
442 |
+
attributes are not considered.
|
443 |
+
|
444 |
+
node_subst_cost, node_del_cost, node_ins_cost : callable
|
445 |
+
Functions that return the costs of node substitution, node
|
446 |
+
deletion, and node insertion, respectively.
|
447 |
+
|
448 |
+
The functions will be called like
|
449 |
+
|
450 |
+
node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
|
451 |
+
node_del_cost(G1.nodes[n1]),
|
452 |
+
node_ins_cost(G2.nodes[n2]).
|
453 |
+
|
454 |
+
That is, the functions will receive the node attribute
|
455 |
+
dictionaries as inputs. The functions are expected to return
|
456 |
+
positive numeric values.
|
457 |
+
|
458 |
+
Function node_subst_cost overrides node_match if specified.
|
459 |
+
If neither node_match nor node_subst_cost are specified then
|
460 |
+
default node substitution cost of 0 is used (node attributes
|
461 |
+
are not considered during matching).
|
462 |
+
|
463 |
+
If node_del_cost is not specified then default node deletion
|
464 |
+
cost of 1 is used. If node_ins_cost is not specified then
|
465 |
+
default node insertion cost of 1 is used.
|
466 |
+
|
467 |
+
edge_subst_cost, edge_del_cost, edge_ins_cost : callable
|
468 |
+
Functions that return the costs of edge substitution, edge
|
469 |
+
deletion, and edge insertion, respectively.
|
470 |
+
|
471 |
+
The functions will be called like
|
472 |
+
|
473 |
+
edge_subst_cost(G1[u1][v1], G2[u2][v2]),
|
474 |
+
edge_del_cost(G1[u1][v1]),
|
475 |
+
edge_ins_cost(G2[u2][v2]).
|
476 |
+
|
477 |
+
That is, the functions will receive the edge attribute
|
478 |
+
dictionaries as inputs. The functions are expected to return
|
479 |
+
positive numeric values.
|
480 |
+
|
481 |
+
Function edge_subst_cost overrides edge_match if specified.
|
482 |
+
If neither edge_match nor edge_subst_cost are specified then
|
483 |
+
default edge substitution cost of 0 is used (edge attributes
|
484 |
+
are not considered during matching).
|
485 |
+
|
486 |
+
If edge_del_cost is not specified then default edge deletion
|
487 |
+
cost of 1 is used. If edge_ins_cost is not specified then
|
488 |
+
default edge insertion cost of 1 is used.
|
489 |
+
|
490 |
+
upper_bound : numeric
|
491 |
+
Maximum edit distance to consider.
|
492 |
+
|
493 |
+
Returns
|
494 |
+
-------
|
495 |
+
Generator of consecutive approximations of graph edit distance.
|
496 |
+
|
497 |
+
Examples
|
498 |
+
--------
|
499 |
+
>>> G1 = nx.cycle_graph(6)
|
500 |
+
>>> G2 = nx.wheel_graph(7)
|
501 |
+
>>> for v in nx.optimize_graph_edit_distance(G1, G2):
|
502 |
+
... minv = v
|
503 |
+
>>> minv
|
504 |
+
7.0
|
505 |
+
|
506 |
+
See Also
|
507 |
+
--------
|
508 |
+
graph_edit_distance, optimize_edit_paths
|
509 |
+
|
510 |
+
References
|
511 |
+
----------
|
512 |
+
.. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
|
513 |
+
Martineau. An Exact Graph Edit Distance Algorithm for Solving
|
514 |
+
Pattern Recognition Problems. 4th International Conference on
|
515 |
+
Pattern Recognition Applications and Methods 2015, Jan 2015,
|
516 |
+
Lisbon, Portugal. 2015,
|
517 |
+
<10.5220/0005209202710278>. <hal-01168816>
|
518 |
+
https://hal.archives-ouvertes.fr/hal-01168816
|
519 |
+
"""
|
520 |
+
for _, _, cost in optimize_edit_paths(
|
521 |
+
G1,
|
522 |
+
G2,
|
523 |
+
node_match,
|
524 |
+
edge_match,
|
525 |
+
node_subst_cost,
|
526 |
+
node_del_cost,
|
527 |
+
node_ins_cost,
|
528 |
+
edge_subst_cost,
|
529 |
+
edge_del_cost,
|
530 |
+
edge_ins_cost,
|
531 |
+
upper_bound,
|
532 |
+
True,
|
533 |
+
):
|
534 |
+
yield cost
|
535 |
+
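# --- Illustrative sketch, not part of the library ----------------------------
# The generator above yields successively smaller upper bounds on the graph
# edit distance.  A minimal way to consume it, mirroring the docstring
# example, is to keep only the last value; the helper name below is
# hypothetical and included purely for illustration.
def _example_consume_ged_approximations():
    G1 = nx.cycle_graph(6)
    G2 = nx.wheel_graph(7)
    minv = None
    for v in optimize_graph_edit_distance(G1, G2):
        minv = v  # each yielded value is no larger than the previous one
    return minv  # 7.0 for this pair of graphs (see the docstring above)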
|
536 |
+
|
537 |
+
@nx._dispatchable(
|
538 |
+
graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True
|
539 |
+
)
|
540 |
+
def optimize_edit_paths(
|
541 |
+
G1,
|
542 |
+
G2,
|
543 |
+
node_match=None,
|
544 |
+
edge_match=None,
|
545 |
+
node_subst_cost=None,
|
546 |
+
node_del_cost=None,
|
547 |
+
node_ins_cost=None,
|
548 |
+
edge_subst_cost=None,
|
549 |
+
edge_del_cost=None,
|
550 |
+
edge_ins_cost=None,
|
551 |
+
upper_bound=None,
|
552 |
+
strictly_decreasing=True,
|
553 |
+
roots=None,
|
554 |
+
timeout=None,
|
555 |
+
):
|
556 |
+
"""GED (graph edit distance) calculation: advanced interface.
|
557 |
+
|
558 |
+
Graph edit path is a sequence of node and edge edit operations
|
559 |
+
transforming graph G1 to graph isomorphic to G2. Edit operations
|
560 |
+
include substitutions, deletions, and insertions.
|
561 |
+
|
562 |
+
Graph edit distance is defined as minimum cost of edit path.
|
563 |
+
|
564 |
+
Parameters
|
565 |
+
----------
|
566 |
+
G1, G2: graphs
|
567 |
+
The two graphs G1 and G2 must be of the same type.
|
568 |
+
|
569 |
+
node_match : callable
|
570 |
+
A function that returns True if node n1 in G1 and n2 in G2
|
571 |
+
should be considered equal during matching.
|
572 |
+
|
573 |
+
The function will be called like
|
574 |
+
|
575 |
+
node_match(G1.nodes[n1], G2.nodes[n2]).
|
576 |
+
|
577 |
+
That is, the function will receive the node attribute
|
578 |
+
dictionaries for n1 and n2 as inputs.
|
579 |
+
|
580 |
+
Ignored if node_subst_cost is specified. If neither
|
581 |
+
node_match nor node_subst_cost are specified then node
|
582 |
+
attributes are not considered.
|
583 |
+
|
584 |
+
edge_match : callable
|
585 |
+
A function that returns True if the edge attribute dictionaries
|
586 |
+
for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
|
587 |
+
be considered equal during matching.
|
588 |
+
|
589 |
+
The function will be called like
|
590 |
+
|
591 |
+
edge_match(G1[u1][v1], G2[u2][v2]).
|
592 |
+
|
593 |
+
That is, the function will receive the edge attribute
|
594 |
+
dictionaries of the edges under consideration.
|
595 |
+
|
596 |
+
Ignored if edge_subst_cost is specified. If neither
|
597 |
+
edge_match nor edge_subst_cost are specified then edge
|
598 |
+
attributes are not considered.
|
599 |
+
|
600 |
+
node_subst_cost, node_del_cost, node_ins_cost : callable
|
601 |
+
Functions that return the costs of node substitution, node
|
602 |
+
deletion, and node insertion, respectively.
|
603 |
+
|
604 |
+
The functions will be called like
|
605 |
+
|
606 |
+
node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
|
607 |
+
node_del_cost(G1.nodes[n1]),
|
608 |
+
node_ins_cost(G2.nodes[n2]).
|
609 |
+
|
610 |
+
That is, the functions will receive the node attribute
|
611 |
+
dictionaries as inputs. The functions are expected to return
|
612 |
+
positive numeric values.
|
613 |
+
|
614 |
+
Function node_subst_cost overrides node_match if specified.
|
615 |
+
If neither node_match nor node_subst_cost are specified then
|
616 |
+
default node substitution cost of 0 is used (node attributes
|
617 |
+
are not considered during matching).
|
618 |
+
|
619 |
+
If node_del_cost is not specified then default node deletion
|
620 |
+
cost of 1 is used. If node_ins_cost is not specified then
|
621 |
+
default node insertion cost of 1 is used.
|
622 |
+
|
623 |
+
edge_subst_cost, edge_del_cost, edge_ins_cost : callable
|
624 |
+
Functions that return the costs of edge substitution, edge
|
625 |
+
deletion, and edge insertion, respectively.
|
626 |
+
|
627 |
+
The functions will be called like
|
628 |
+
|
629 |
+
edge_subst_cost(G1[u1][v1], G2[u2][v2]),
|
630 |
+
edge_del_cost(G1[u1][v1]),
|
631 |
+
edge_ins_cost(G2[u2][v2]).
|
632 |
+
|
633 |
+
That is, the functions will receive the edge attribute
|
634 |
+
dictionaries as inputs. The functions are expected to return
|
635 |
+
positive numeric values.
|
636 |
+
|
637 |
+
Function edge_subst_cost overrides edge_match if specified.
|
638 |
+
If neither edge_match nor edge_subst_cost are specified then
|
639 |
+
default edge substitution cost of 0 is used (edge attributes
|
640 |
+
are not considered during matching).
|
641 |
+
|
642 |
+
If edge_del_cost is not specified then default edge deletion
|
643 |
+
cost of 1 is used. If edge_ins_cost is not specified then
|
644 |
+
default edge insertion cost of 1 is used.
|
645 |
+
|
646 |
+
upper_bound : numeric
|
647 |
+
Maximum edit distance to consider.
|
648 |
+
|
649 |
+
strictly_decreasing : bool
|
650 |
+
If True, return consecutive approximations of strictly
|
651 |
+
decreasing cost. Otherwise, return all edit paths of cost
|
652 |
+
less than or equal to the previous minimum cost.
|
653 |
+
|
654 |
+
roots : 2-tuple
|
655 |
+
Tuple where first element is a node in G1 and the second
|
656 |
+
is a node in G2.
|
657 |
+
These nodes are forced to be matched in the comparison to
|
658 |
+
allow comparison between rooted graphs.
|
659 |
+
|
660 |
+
timeout : numeric
|
661 |
+
Maximum number of seconds to execute.
|
662 |
+
After timeout is met, the current best GED is returned.
|
663 |
+
|
664 |
+
Returns
|
665 |
+
-------
|
666 |
+
Generator of tuples (node_edit_path, edge_edit_path, cost)
|
667 |
+
node_edit_path : list of tuples (u, v)
|
668 |
+
edge_edit_path : list of tuples ((u1, v1), (u2, v2))
|
669 |
+
cost : numeric
|
670 |
+
|
671 |
+
See Also
|
672 |
+
--------
|
673 |
+
graph_edit_distance, optimize_graph_edit_distance, optimal_edit_paths
|
674 |
+
|
675 |
+
References
|
676 |
+
----------
|
677 |
+
.. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
|
678 |
+
Martineau. An Exact Graph Edit Distance Algorithm for Solving
|
679 |
+
Pattern Recognition Problems. 4th International Conference on
|
680 |
+
Pattern Recognition Applications and Methods 2015, Jan 2015,
|
681 |
+
Lisbon, Portugal. 2015,
|
682 |
+
<10.5220/0005209202710278>. <hal-01168816>
|
683 |
+
https://hal.archives-ouvertes.fr/hal-01168816
|
684 |
+
|
685 |
+
"""
|
686 |
+
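    # Overview of the implementation below: two (m + n) x (m + n) cost
    # matrices are built -- Cv for node mappings and Ce for edge mappings --
    # and their linear-sum-assignment solutions (make_CostMatrix) provide
    # lower bounds.  get_edit_ops enumerates candidate node mappings for the
    # current partial path, and get_edit_paths extends them depth-first,
    # pruning any branch whose bound exceeds the best complete path so far.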
# TODO: support DiGraph
|
687 |
+
|
688 |
+
import numpy as np
|
689 |
+
import scipy as sp
|
690 |
+
|
691 |
+
@dataclass
|
692 |
+
class CostMatrix:
|
693 |
+
C: ...
|
694 |
+
lsa_row_ind: ...
|
695 |
+
lsa_col_ind: ...
|
696 |
+
ls: ...
|
697 |
+
|
698 |
+
    def make_CostMatrix(C, m, n):
        # assert(C.shape == (m + n, m + n))
        lsa_row_ind, lsa_col_ind = sp.optimize.linear_sum_assignment(C)

        # Fixup dummy assignments:
        # each substitution i<->j should have dummy assignment m+j<->n+i
        # NOTE: fast reduce of Cv relies on it
        # assert len(lsa_row_ind) == len(lsa_col_ind)
        indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind)
        subst_ind = [k for k, i, j in indexes if i < m and j < n]
        indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind)
        dummy_ind = [k for k, i, j in indexes if i >= m and j >= n]
        # assert len(subst_ind) == len(dummy_ind)
        lsa_row_ind[dummy_ind] = lsa_col_ind[subst_ind] + m
        lsa_col_ind[dummy_ind] = lsa_row_ind[subst_ind] + n

        return CostMatrix(
            C, lsa_row_ind, lsa_col_ind, C[lsa_row_ind, lsa_col_ind].sum()
        )
|
717 |
+
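    # A small worked example of the linear sum assignment used above (an
    # illustration, not part of the original code): for
    #     C = [[4, 1],
    #          [2, 0]]
    # scipy.optimize.linear_sum_assignment(C) returns rows [0, 1] matched to
    # columns [1, 0], because 1 + 2 = 3 is cheaper than the diagonal
    # assignment 4 + 0 = 4.  CostMatrix.ls stores exactly this minimum sum,
    # which serves as a lower bound on the remaining edit cost.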
|
718 |
+
def extract_C(C, i, j, m, n):
|
719 |
+
# assert(C.shape == (m + n, m + n))
|
720 |
+
row_ind = [k in i or k - m in j for k in range(m + n)]
|
721 |
+
col_ind = [k in j or k - n in i for k in range(m + n)]
|
722 |
+
return C[row_ind, :][:, col_ind]
|
723 |
+
|
724 |
+
def reduce_C(C, i, j, m, n):
|
725 |
+
# assert(C.shape == (m + n, m + n))
|
726 |
+
row_ind = [k not in i and k - m not in j for k in range(m + n)]
|
727 |
+
col_ind = [k not in j and k - n not in i for k in range(m + n)]
|
728 |
+
return C[row_ind, :][:, col_ind]
|
729 |
+
|
730 |
+
def reduce_ind(ind, i):
|
731 |
+
# assert set(ind) == set(range(len(ind)))
|
732 |
+
rind = ind[[k not in i for k in ind]]
|
733 |
+
for k in set(i):
|
734 |
+
rind[rind >= k] -= 1
|
735 |
+
return rind
|
736 |
+
|
737 |
+
def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=None):
|
738 |
+
"""
|
739 |
+
Parameters:
|
740 |
+
u, v: matched vertices, u=None or v=None for
|
741 |
+
deletion/insertion
|
742 |
+
pending_g, pending_h: lists of edges not yet mapped
|
743 |
+
Ce: CostMatrix of pending edge mappings
|
744 |
+
matched_uv: partial vertex edit path
|
745 |
+
list of tuples (u, v) of previously matched vertex
|
746 |
+
mappings u<->v, u=None or v=None for
|
747 |
+
deletion/insertion
|
748 |
+
|
749 |
+
Returns:
|
750 |
+
list of (i, j): indices of edge mappings g<->h
|
751 |
+
localCe: local CostMatrix of edge mappings
|
752 |
+
(basically submatrix of Ce at cross of rows i, cols j)
|
753 |
+
"""
|
754 |
+
M = len(pending_g)
|
755 |
+
N = len(pending_h)
|
756 |
+
# assert Ce.C.shape == (M + N, M + N)
|
757 |
+
|
758 |
+
# only attempt to match edges after one node match has been made
|
759 |
+
# this will stop self-edges on the first node being automatically deleted
|
760 |
+
# even when a substitution is the better option
|
761 |
+
if matched_uv is None or len(matched_uv) == 0:
|
762 |
+
g_ind = []
|
763 |
+
h_ind = []
|
764 |
+
else:
|
765 |
+
g_ind = [
|
766 |
+
i
|
767 |
+
for i in range(M)
|
768 |
+
if pending_g[i][:2] == (u, u)
|
769 |
+
or any(
|
770 |
+
pending_g[i][:2] in ((p, u), (u, p), (p, p)) for p, q in matched_uv
|
771 |
+
)
|
772 |
+
]
|
773 |
+
h_ind = [
|
774 |
+
j
|
775 |
+
for j in range(N)
|
776 |
+
if pending_h[j][:2] == (v, v)
|
777 |
+
or any(
|
778 |
+
pending_h[j][:2] in ((q, v), (v, q), (q, q)) for p, q in matched_uv
|
779 |
+
)
|
780 |
+
]
|
781 |
+
|
782 |
+
m = len(g_ind)
|
783 |
+
n = len(h_ind)
|
784 |
+
|
785 |
+
if m or n:
|
786 |
+
C = extract_C(Ce.C, g_ind, h_ind, M, N)
|
787 |
+
# assert C.shape == (m + n, m + n)
|
788 |
+
|
789 |
+
# Forbid structurally invalid matches
|
790 |
+
# NOTE: inf remembered from Ce construction
|
791 |
+
for k, i in enumerate(g_ind):
|
792 |
+
g = pending_g[i][:2]
|
793 |
+
for l, j in enumerate(h_ind):
|
794 |
+
h = pending_h[j][:2]
|
795 |
+
if nx.is_directed(G1) or nx.is_directed(G2):
|
796 |
+
if any(
|
797 |
+
g == (p, u) and h == (q, v) or g == (u, p) and h == (v, q)
|
798 |
+
for p, q in matched_uv
|
799 |
+
):
|
800 |
+
continue
|
801 |
+
else:
|
802 |
+
if any(
|
803 |
+
g in ((p, u), (u, p)) and h in ((q, v), (v, q))
|
804 |
+
for p, q in matched_uv
|
805 |
+
):
|
806 |
+
continue
|
807 |
+
if g == (u, u) or any(g == (p, p) for p, q in matched_uv):
|
808 |
+
continue
|
809 |
+
if h == (v, v) or any(h == (q, q) for p, q in matched_uv):
|
810 |
+
continue
|
811 |
+
C[k, l] = inf
|
812 |
+
|
813 |
+
localCe = make_CostMatrix(C, m, n)
|
814 |
+
ij = [
|
815 |
+
(
|
816 |
+
g_ind[k] if k < m else M + h_ind[l],
|
817 |
+
h_ind[l] if l < n else N + g_ind[k],
|
818 |
+
)
|
819 |
+
for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind)
|
820 |
+
if k < m or l < n
|
821 |
+
]
|
822 |
+
|
823 |
+
else:
|
824 |
+
ij = []
|
825 |
+
localCe = CostMatrix(np.empty((0, 0)), [], [], 0)
|
826 |
+
|
827 |
+
return ij, localCe
|
828 |
+
|
829 |
+
def reduce_Ce(Ce, ij, m, n):
|
830 |
+
if len(ij):
|
831 |
+
i, j = zip(*ij)
|
832 |
+
m_i = m - sum(1 for t in i if t < m)
|
833 |
+
n_j = n - sum(1 for t in j if t < n)
|
834 |
+
return make_CostMatrix(reduce_C(Ce.C, i, j, m, n), m_i, n_j)
|
835 |
+
return Ce
|
836 |
+
|
837 |
+
def get_edit_ops(
|
838 |
+
matched_uv, pending_u, pending_v, Cv, pending_g, pending_h, Ce, matched_cost
|
839 |
+
):
|
840 |
+
"""
|
841 |
+
Parameters:
|
842 |
+
matched_uv: partial vertex edit path
|
843 |
+
list of tuples (u, v) of vertex mappings u<->v,
|
844 |
+
u=None or v=None for deletion/insertion
|
845 |
+
pending_u, pending_v: lists of vertices not yet mapped
|
846 |
+
Cv: CostMatrix of pending vertex mappings
|
847 |
+
pending_g, pending_h: lists of edges not yet mapped
|
848 |
+
Ce: CostMatrix of pending edge mappings
|
849 |
+
matched_cost: cost of partial edit path
|
850 |
+
|
851 |
+
Returns:
|
852 |
+
sequence of
|
853 |
+
(i, j): indices of vertex mapping u<->v
|
854 |
+
Cv_ij: reduced CostMatrix of pending vertex mappings
|
855 |
+
(basically Cv with row i, col j removed)
|
856 |
+
list of (x, y): indices of edge mappings g<->h
|
857 |
+
Ce_xy: reduced CostMatrix of pending edge mappings
|
858 |
+
(basically Ce with rows x, cols y removed)
|
859 |
+
cost: total cost of edit operation
|
860 |
+
NOTE: most promising ops first
|
861 |
+
"""
|
862 |
+
m = len(pending_u)
|
863 |
+
n = len(pending_v)
|
864 |
+
# assert Cv.C.shape == (m + n, m + n)
|
865 |
+
|
866 |
+
# 1) a vertex mapping from optimal linear sum assignment
|
867 |
+
i, j = min(
|
868 |
+
(k, l) for k, l in zip(Cv.lsa_row_ind, Cv.lsa_col_ind) if k < m or l < n
|
869 |
+
)
|
870 |
+
xy, localCe = match_edges(
|
871 |
+
pending_u[i] if i < m else None,
|
872 |
+
pending_v[j] if j < n else None,
|
873 |
+
pending_g,
|
874 |
+
pending_h,
|
875 |
+
Ce,
|
876 |
+
matched_uv,
|
877 |
+
)
|
878 |
+
Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h))
|
879 |
+
# assert Ce.ls <= localCe.ls + Ce_xy.ls
|
880 |
+
if prune(matched_cost + Cv.ls + localCe.ls + Ce_xy.ls):
|
881 |
+
pass
|
882 |
+
else:
|
883 |
+
# get reduced Cv efficiently
|
884 |
+
Cv_ij = CostMatrix(
|
885 |
+
reduce_C(Cv.C, (i,), (j,), m, n),
|
886 |
+
reduce_ind(Cv.lsa_row_ind, (i, m + j)),
|
887 |
+
reduce_ind(Cv.lsa_col_ind, (j, n + i)),
|
888 |
+
Cv.ls - Cv.C[i, j],
|
889 |
+
)
|
890 |
+
yield (i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls
|
891 |
+
|
892 |
+
# 2) other candidates, sorted by lower-bound cost estimate
|
893 |
+
other = []
|
894 |
+
fixed_i, fixed_j = i, j
|
895 |
+
if m <= n:
|
896 |
+
candidates = (
|
897 |
+
(t, fixed_j)
|
898 |
+
for t in range(m + n)
|
899 |
+
if t != fixed_i and (t < m or t == m + fixed_j)
|
900 |
+
)
|
901 |
+
else:
|
902 |
+
candidates = (
|
903 |
+
(fixed_i, t)
|
904 |
+
for t in range(m + n)
|
905 |
+
if t != fixed_j and (t < n or t == n + fixed_i)
|
906 |
+
)
|
907 |
+
for i, j in candidates:
|
908 |
+
if prune(matched_cost + Cv.C[i, j] + Ce.ls):
|
909 |
+
continue
|
910 |
+
Cv_ij = make_CostMatrix(
|
911 |
+
reduce_C(Cv.C, (i,), (j,), m, n),
|
912 |
+
m - 1 if i < m else m,
|
913 |
+
n - 1 if j < n else n,
|
914 |
+
)
|
915 |
+
# assert Cv.ls <= Cv.C[i, j] + Cv_ij.ls
|
916 |
+
if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + Ce.ls):
|
917 |
+
continue
|
918 |
+
xy, localCe = match_edges(
|
919 |
+
pending_u[i] if i < m else None,
|
920 |
+
pending_v[j] if j < n else None,
|
921 |
+
pending_g,
|
922 |
+
pending_h,
|
923 |
+
Ce,
|
924 |
+
matched_uv,
|
925 |
+
)
|
926 |
+
if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls):
|
927 |
+
continue
|
928 |
+
Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h))
|
929 |
+
# assert Ce.ls <= localCe.ls + Ce_xy.ls
|
930 |
+
if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls + Ce_xy.ls):
|
931 |
+
continue
|
932 |
+
other.append(((i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls))
|
933 |
+
|
934 |
+
yield from sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls)
|
935 |
+
|
936 |
+
def get_edit_paths(
|
937 |
+
matched_uv,
|
938 |
+
pending_u,
|
939 |
+
pending_v,
|
940 |
+
Cv,
|
941 |
+
matched_gh,
|
942 |
+
pending_g,
|
943 |
+
pending_h,
|
944 |
+
Ce,
|
945 |
+
matched_cost,
|
946 |
+
):
|
947 |
+
"""
|
948 |
+
Parameters:
|
949 |
+
matched_uv: partial vertex edit path
|
950 |
+
list of tuples (u, v) of vertex mappings u<->v,
|
951 |
+
u=None or v=None for deletion/insertion
|
952 |
+
pending_u, pending_v: lists of vertices not yet mapped
|
953 |
+
Cv: CostMatrix of pending vertex mappings
|
954 |
+
matched_gh: partial edge edit path
|
955 |
+
list of tuples (g, h) of edge mappings g<->h,
|
956 |
+
g=None or h=None for deletion/insertion
|
957 |
+
pending_g, pending_h: lists of edges not yet mapped
|
958 |
+
Ce: CostMatrix of pending edge mappings
|
959 |
+
matched_cost: cost of partial edit path
|
960 |
+
|
961 |
+
Returns:
|
962 |
+
sequence of (vertex_path, edge_path, cost)
|
963 |
+
vertex_path: complete vertex edit path
|
964 |
+
list of tuples (u, v) of vertex mappings u<->v,
|
965 |
+
u=None or v=None for deletion/insertion
|
966 |
+
edge_path: complete edge edit path
|
967 |
+
list of tuples (g, h) of edge mappings g<->h,
|
968 |
+
g=None or h=None for deletion/insertion
|
969 |
+
cost: total cost of edit path
|
970 |
+
NOTE: path costs are non-increasing
|
971 |
+
"""
|
972 |
+
# debug_print('matched-uv:', matched_uv)
|
973 |
+
# debug_print('matched-gh:', matched_gh)
|
974 |
+
# debug_print('matched-cost:', matched_cost)
|
975 |
+
# debug_print('pending-u:', pending_u)
|
976 |
+
# debug_print('pending-v:', pending_v)
|
977 |
+
# debug_print(Cv.C)
|
978 |
+
# assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in matched_uv if u is not None) + pending_u))
|
979 |
+
# assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in matched_uv if v is not None) + pending_v))
|
980 |
+
# debug_print('pending-g:', pending_g)
|
981 |
+
# debug_print('pending-h:', pending_h)
|
982 |
+
# debug_print(Ce.C)
|
983 |
+
# assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in matched_gh if g is not None) + pending_g))
|
984 |
+
# assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in matched_gh if h is not None) + pending_h))
|
985 |
+
# debug_print()
|
986 |
+
|
987 |
+
if prune(matched_cost + Cv.ls + Ce.ls):
|
988 |
+
return
|
989 |
+
|
990 |
+
if not max(len(pending_u), len(pending_v)):
|
991 |
+
# assert not len(pending_g)
|
992 |
+
# assert not len(pending_h)
|
993 |
+
# path completed!
|
994 |
+
# assert matched_cost <= maxcost_value
|
995 |
+
nonlocal maxcost_value
|
996 |
+
maxcost_value = min(maxcost_value, matched_cost)
|
997 |
+
yield matched_uv, matched_gh, matched_cost
|
998 |
+
|
999 |
+
else:
|
1000 |
+
edit_ops = get_edit_ops(
|
1001 |
+
matched_uv,
|
1002 |
+
pending_u,
|
1003 |
+
pending_v,
|
1004 |
+
Cv,
|
1005 |
+
pending_g,
|
1006 |
+
pending_h,
|
1007 |
+
Ce,
|
1008 |
+
matched_cost,
|
1009 |
+
)
|
1010 |
+
for ij, Cv_ij, xy, Ce_xy, edit_cost in edit_ops:
|
1011 |
+
i, j = ij
|
1012 |
+
# assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost
|
1013 |
+
if prune(matched_cost + edit_cost + Cv_ij.ls + Ce_xy.ls):
|
1014 |
+
continue
|
1015 |
+
|
1016 |
+
# dive deeper
|
1017 |
+
u = pending_u.pop(i) if i < len(pending_u) else None
|
1018 |
+
v = pending_v.pop(j) if j < len(pending_v) else None
|
1019 |
+
matched_uv.append((u, v))
|
1020 |
+
for x, y in xy:
|
1021 |
+
len_g = len(pending_g)
|
1022 |
+
len_h = len(pending_h)
|
1023 |
+
matched_gh.append(
|
1024 |
+
(
|
1025 |
+
pending_g[x] if x < len_g else None,
|
1026 |
+
pending_h[y] if y < len_h else None,
|
1027 |
+
)
|
1028 |
+
)
|
1029 |
+
sortedx = sorted(x for x, y in xy)
|
1030 |
+
sortedy = sorted(y for x, y in xy)
|
1031 |
+
G = [
|
1032 |
+
(pending_g.pop(x) if x < len(pending_g) else None)
|
1033 |
+
for x in reversed(sortedx)
|
1034 |
+
]
|
1035 |
+
H = [
|
1036 |
+
(pending_h.pop(y) if y < len(pending_h) else None)
|
1037 |
+
for y in reversed(sortedy)
|
1038 |
+
]
|
1039 |
+
|
1040 |
+
yield from get_edit_paths(
|
1041 |
+
matched_uv,
|
1042 |
+
pending_u,
|
1043 |
+
pending_v,
|
1044 |
+
Cv_ij,
|
1045 |
+
matched_gh,
|
1046 |
+
pending_g,
|
1047 |
+
pending_h,
|
1048 |
+
Ce_xy,
|
1049 |
+
matched_cost + edit_cost,
|
1050 |
+
)
|
1051 |
+
|
1052 |
+
# backtrack
|
1053 |
+
if u is not None:
|
1054 |
+
pending_u.insert(i, u)
|
1055 |
+
if v is not None:
|
1056 |
+
pending_v.insert(j, v)
|
1057 |
+
matched_uv.pop()
|
1058 |
+
for x, g in zip(sortedx, reversed(G)):
|
1059 |
+
if g is not None:
|
1060 |
+
pending_g.insert(x, g)
|
1061 |
+
for y, h in zip(sortedy, reversed(H)):
|
1062 |
+
if h is not None:
|
1063 |
+
pending_h.insert(y, h)
|
1064 |
+
for _ in xy:
|
1065 |
+
matched_gh.pop()
|
1066 |
+
|
1067 |
+
# Initialization
|
1068 |
+
|
1069 |
+
pending_u = list(G1.nodes)
|
1070 |
+
pending_v = list(G2.nodes)
|
1071 |
+
|
1072 |
+
initial_cost = 0
|
1073 |
+
if roots:
|
1074 |
+
root_u, root_v = roots
|
1075 |
+
if root_u not in pending_u or root_v not in pending_v:
|
1076 |
+
raise nx.NodeNotFound("Root node not in graph.")
|
1077 |
+
|
1078 |
+
# remove roots from pending
|
1079 |
+
pending_u.remove(root_u)
|
1080 |
+
pending_v.remove(root_v)
|
1081 |
+
|
1082 |
+
# cost matrix of vertex mappings
|
1083 |
+
m = len(pending_u)
|
1084 |
+
n = len(pending_v)
|
1085 |
+
C = np.zeros((m + n, m + n))
|
1086 |
+
if node_subst_cost:
|
1087 |
+
C[0:m, 0:n] = np.array(
|
1088 |
+
[
|
1089 |
+
node_subst_cost(G1.nodes[u], G2.nodes[v])
|
1090 |
+
for u in pending_u
|
1091 |
+
for v in pending_v
|
1092 |
+
]
|
1093 |
+
).reshape(m, n)
|
1094 |
+
if roots:
|
1095 |
+
initial_cost = node_subst_cost(G1.nodes[root_u], G2.nodes[root_v])
|
1096 |
+
elif node_match:
|
1097 |
+
C[0:m, 0:n] = np.array(
|
1098 |
+
[
|
1099 |
+
1 - int(node_match(G1.nodes[u], G2.nodes[v]))
|
1100 |
+
for u in pending_u
|
1101 |
+
for v in pending_v
|
1102 |
+
]
|
1103 |
+
).reshape(m, n)
|
1104 |
+
if roots:
|
1105 |
+
initial_cost = 1 - node_match(G1.nodes[root_u], G2.nodes[root_v])
|
1106 |
+
else:
|
1107 |
+
# all zeroes
|
1108 |
+
pass
|
1109 |
+
# assert not min(m, n) or C[0:m, 0:n].min() >= 0
|
1110 |
+
if node_del_cost:
|
1111 |
+
del_costs = [node_del_cost(G1.nodes[u]) for u in pending_u]
|
1112 |
+
else:
|
1113 |
+
del_costs = [1] * len(pending_u)
|
1114 |
+
# assert not m or min(del_costs) >= 0
|
1115 |
+
if node_ins_cost:
|
1116 |
+
ins_costs = [node_ins_cost(G2.nodes[v]) for v in pending_v]
|
1117 |
+
else:
|
1118 |
+
ins_costs = [1] * len(pending_v)
|
1119 |
+
# assert not n or min(ins_costs) >= 0
|
1120 |
+
inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
|
1121 |
+
C[0:m, n : n + m] = np.array(
|
1122 |
+
[del_costs[i] if i == j else inf for i in range(m) for j in range(m)]
|
1123 |
+
).reshape(m, m)
|
1124 |
+
C[m : m + n, 0:n] = np.array(
|
1125 |
+
[ins_costs[i] if i == j else inf for i in range(n) for j in range(n)]
|
1126 |
+
).reshape(n, n)
|
1127 |
+
Cv = make_CostMatrix(C, m, n)
|
1128 |
+
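    # Layout of the (m + n) x (m + n) matrix assembled above:
    #   C[0:m, 0:n]      -- node substitution costs
    #   C[0:m, n:n+m]    -- node deletion costs on the diagonal, inf elsewhere
    #   C[m:m+n, 0:n]    -- node insertion costs on the diagonal, inf elsewhere
    #   C[m:m+n, n:n+m]  -- zeros (dummy-to-dummy assignments)
    # so every finite-cost assignment of this matrix encodes a complete set of
    # node substitutions, deletions and insertions.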
# debug_print(f"Cv: {m} x {n}")
|
1129 |
+
# debug_print(Cv.C)
|
1130 |
+
|
1131 |
+
pending_g = list(G1.edges)
|
1132 |
+
pending_h = list(G2.edges)
|
1133 |
+
|
1134 |
+
# cost matrix of edge mappings
|
1135 |
+
m = len(pending_g)
|
1136 |
+
n = len(pending_h)
|
1137 |
+
C = np.zeros((m + n, m + n))
|
1138 |
+
if edge_subst_cost:
|
1139 |
+
C[0:m, 0:n] = np.array(
|
1140 |
+
[
|
1141 |
+
edge_subst_cost(G1.edges[g], G2.edges[h])
|
1142 |
+
for g in pending_g
|
1143 |
+
for h in pending_h
|
1144 |
+
]
|
1145 |
+
).reshape(m, n)
|
1146 |
+
elif edge_match:
|
1147 |
+
C[0:m, 0:n] = np.array(
|
1148 |
+
[
|
1149 |
+
1 - int(edge_match(G1.edges[g], G2.edges[h]))
|
1150 |
+
for g in pending_g
|
1151 |
+
for h in pending_h
|
1152 |
+
]
|
1153 |
+
).reshape(m, n)
|
1154 |
+
else:
|
1155 |
+
# all zeroes
|
1156 |
+
pass
|
1157 |
+
# assert not min(m, n) or C[0:m, 0:n].min() >= 0
|
1158 |
+
if edge_del_cost:
|
1159 |
+
del_costs = [edge_del_cost(G1.edges[g]) for g in pending_g]
|
1160 |
+
else:
|
1161 |
+
del_costs = [1] * len(pending_g)
|
1162 |
+
# assert not m or min(del_costs) >= 0
|
1163 |
+
if edge_ins_cost:
|
1164 |
+
ins_costs = [edge_ins_cost(G2.edges[h]) for h in pending_h]
|
1165 |
+
else:
|
1166 |
+
ins_costs = [1] * len(pending_h)
|
1167 |
+
# assert not n or min(ins_costs) >= 0
|
1168 |
+
inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
|
1169 |
+
C[0:m, n : n + m] = np.array(
|
1170 |
+
[del_costs[i] if i == j else inf for i in range(m) for j in range(m)]
|
1171 |
+
).reshape(m, m)
|
1172 |
+
C[m : m + n, 0:n] = np.array(
|
1173 |
+
[ins_costs[i] if i == j else inf for i in range(n) for j in range(n)]
|
1174 |
+
).reshape(n, n)
|
1175 |
+
Ce = make_CostMatrix(C, m, n)
|
1176 |
+
# debug_print(f'Ce: {m} x {n}')
|
1177 |
+
# debug_print(Ce.C)
|
1178 |
+
# debug_print()
|
1179 |
+
|
1180 |
+
maxcost_value = Cv.C.sum() + Ce.C.sum() + 1
|
1181 |
+
|
1182 |
+
    if timeout is not None:
        if timeout <= 0:
            raise nx.NetworkXError("Timeout value must be greater than 0")
        start = time.perf_counter()

    def prune(cost):
        if timeout is not None:
            if time.perf_counter() - start > timeout:
                return True
        if upper_bound is not None:
            if cost > upper_bound:
                return True
        if cost > maxcost_value:
            return True
        if strictly_decreasing and cost >= maxcost_value:
            return True
        return False
|
1199 |
+
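    # Concrete illustration of the pruning rule above (values are arbitrary):
    # with upper_bound=None, maxcost_value=10 and strictly_decreasing=True,
    # prune(10) is True (not strictly better than the best path found so far)
    # while prune(9.5) is False and the branch is explored.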
|
1200 |
+
# Now go!
|
1201 |
+
|
1202 |
+
done_uv = [] if roots is None else [roots]
|
1203 |
+
|
1204 |
+
for vertex_path, edge_path, cost in get_edit_paths(
|
1205 |
+
done_uv, pending_u, pending_v, Cv, [], pending_g, pending_h, Ce, initial_cost
|
1206 |
+
):
|
1207 |
+
# assert sorted(G1.nodes) == sorted(u for u, v in vertex_path if u is not None)
|
1208 |
+
# assert sorted(G2.nodes) == sorted(v for u, v in vertex_path if v is not None)
|
1209 |
+
# assert sorted(G1.edges) == sorted(g for g, h in edge_path if g is not None)
|
1210 |
+
# assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None)
|
1211 |
+
# print(vertex_path, edge_path, cost, file = sys.stderr)
|
1212 |
+
# assert cost == maxcost_value
|
1213 |
+
yield list(vertex_path), list(edge_path), float(cost)
|
1214 |
+
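# --- Illustrative sketch, not part of the library ----------------------------
# A hedged example of how the advanced interface above is typically consumed:
# iterate the generator to exhaustion and keep the last (cheapest) edit path.
# The helper name is hypothetical and only included for illustration.
def _example_best_edit_path(G1, G2):
    best = None
    for node_path, edge_path, cost in optimize_edit_paths(G1, G2):
        # node_path: list of (u, v) node mappings; None marks deletion/insertion
        # edge_path: list of ((u1, v1), (u2, v2)) edge mappings
        best = (node_path, edge_path, cost)
    return best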
|
1215 |
+
|
1216 |
+
@nx._dispatchable
|
1217 |
+
def simrank_similarity(
|
1218 |
+
G,
|
1219 |
+
source=None,
|
1220 |
+
target=None,
|
1221 |
+
importance_factor=0.9,
|
1222 |
+
max_iterations=1000,
|
1223 |
+
tolerance=1e-4,
|
1224 |
+
):
|
1225 |
+
"""Returns the SimRank similarity of nodes in the graph ``G``.
|
1226 |
+
|
1227 |
+
SimRank is a similarity metric that says "two objects are considered
|
1228 |
+
to be similar if they are referenced by similar objects." [1]_.
|
1229 |
+
|
1230 |
+
The pseudo-code definition from the paper is::
|
1231 |
+
|
1232 |
+
def simrank(G, u, v):
|
1233 |
+
in_neighbors_u = G.predecessors(u)
|
1234 |
+
in_neighbors_v = G.predecessors(v)
|
1235 |
+
scale = C / (len(in_neighbors_u) * len(in_neighbors_v))
|
1236 |
+
return scale * sum(
|
1237 |
+
simrank(G, w, x) for w, x in product(in_neighbors_u, in_neighbors_v)
|
1238 |
+
)
|
1239 |
+
|
1240 |
+
where ``G`` is the graph, ``u`` is the source, ``v`` is the target,
|
1241 |
+
and ``C`` is a float decay or importance factor between 0 and 1.
|
1242 |
+
|
1243 |
+
The SimRank algorithm for determining node similarity is defined in
|
1244 |
+
[2]_.
|
1245 |
+
|
1246 |
+
Parameters
|
1247 |
+
----------
|
1248 |
+
G : NetworkX graph
|
1249 |
+
A NetworkX graph
|
1250 |
+
|
1251 |
+
source : node
|
1252 |
+
If this is specified, the returned dictionary maps each node
|
1253 |
+
``v`` in the graph to the similarity between ``source`` and
|
1254 |
+
``v``.
|
1255 |
+
|
1256 |
+
target : node
|
1257 |
+
If both ``source`` and ``target`` are specified, the similarity
|
1258 |
+
value between ``source`` and ``target`` is returned. If
|
1259 |
+
``target`` is specified but ``source`` is not, this argument is
|
1260 |
+
ignored.
|
1261 |
+
|
1262 |
+
importance_factor : float
|
1263 |
+
The relative importance of indirect neighbors with respect to
|
1264 |
+
direct neighbors.
|
1265 |
+
|
1266 |
+
max_iterations : integer
|
1267 |
+
Maximum number of iterations.
|
1268 |
+
|
1269 |
+
tolerance : float
|
1270 |
+
Error tolerance used to check convergence. When an iteration of
|
1271 |
+
the algorithm finds that no similarity value changes more than
|
1272 |
+
this amount, the algorithm halts.
|
1273 |
+
|
1274 |
+
Returns
|
1275 |
+
-------
|
1276 |
+
similarity : dictionary or float
|
1277 |
+
If ``source`` and ``target`` are both ``None``, this returns a
|
1278 |
+
dictionary of dictionaries, where keys are node pairs and value
|
1279 |
+
are similarity of the pair of nodes.
|
1280 |
+
|
1281 |
+
If ``source`` is not ``None`` but ``target`` is, this returns a
|
1282 |
+
dictionary mapping node to the similarity of ``source`` and that
|
1283 |
+
node.
|
1284 |
+
|
1285 |
+
If neither ``source`` nor ``target`` is ``None``, this returns
|
1286 |
+
the similarity value for the given pair of nodes.
|
1287 |
+
|
1288 |
+
Raises
|
1289 |
+
------
|
1290 |
+
ExceededMaxIterations
|
1291 |
+
If the algorithm does not converge within ``max_iterations``.
|
1292 |
+
|
1293 |
+
NodeNotFound
|
1294 |
+
If either ``source`` or ``target`` is not in `G`.
|
1295 |
+
|
1296 |
+
Examples
|
1297 |
+
--------
|
1298 |
+
>>> G = nx.cycle_graph(2)
|
1299 |
+
>>> nx.simrank_similarity(G)
|
1300 |
+
{0: {0: 1.0, 1: 0.0}, 1: {0: 0.0, 1: 1.0}}
|
1301 |
+
>>> nx.simrank_similarity(G, source=0)
|
1302 |
+
{0: 1.0, 1: 0.0}
|
1303 |
+
>>> nx.simrank_similarity(G, source=0, target=0)
|
1304 |
+
1.0
|
1305 |
+
|
1306 |
+
The result of this function can be converted to a numpy array
|
1307 |
+
representing the SimRank matrix by using the node order of the
|
1308 |
+
graph to determine which row and column represent each node.
|
1309 |
+
Other ordering of nodes is also possible.
|
1310 |
+
|
1311 |
+
>>> import numpy as np
|
1312 |
+
>>> sim = nx.simrank_similarity(G)
|
1313 |
+
>>> np.array([[sim[u][v] for v in G] for u in G])
|
1314 |
+
array([[1., 0.],
|
1315 |
+
[0., 1.]])
|
1316 |
+
>>> sim_1d = nx.simrank_similarity(G, source=0)
|
1317 |
+
>>> np.array([sim_1d[v] for v in G])
|
1318 |
+
array([1., 0.])
|
1319 |
+
|
1320 |
+
References
|
1321 |
+
----------
|
1322 |
+
.. [1] https://en.wikipedia.org/wiki/SimRank
|
1323 |
+
.. [2] G. Jeh and J. Widom.
|
1324 |
+
"SimRank: a measure of structural-context similarity",
|
1325 |
+
In KDD'02: Proceedings of the Eighth ACM SIGKDD
|
1326 |
+
International Conference on Knowledge Discovery and Data Mining,
|
1327 |
+
pp. 538--543. ACM Press, 2002.
|
1328 |
+
"""
|
1329 |
+
import numpy as np
|
1330 |
+
|
1331 |
+
nodelist = list(G)
|
1332 |
+
if source is not None:
|
1333 |
+
if source not in nodelist:
|
1334 |
+
raise nx.NodeNotFound(f"Source node {source} not in G")
|
1335 |
+
else:
|
1336 |
+
s_indx = nodelist.index(source)
|
1337 |
+
else:
|
1338 |
+
s_indx = None
|
1339 |
+
|
1340 |
+
if target is not None:
|
1341 |
+
if target not in nodelist:
|
1342 |
+
raise nx.NodeNotFound(f"Target node {target} not in G")
|
1343 |
+
else:
|
1344 |
+
t_indx = nodelist.index(target)
|
1345 |
+
else:
|
1346 |
+
t_indx = None
|
1347 |
+
|
1348 |
+
x = _simrank_similarity_numpy(
|
1349 |
+
G, s_indx, t_indx, importance_factor, max_iterations, tolerance
|
1350 |
+
)
|
1351 |
+
|
1352 |
+
if isinstance(x, np.ndarray):
|
1353 |
+
if x.ndim == 1:
|
1354 |
+
return dict(zip(G, x.tolist()))
|
1355 |
+
# else x.ndim == 2
|
1356 |
+
return {u: dict(zip(G, row)) for u, row in zip(G, x.tolist())}
|
1357 |
+
return float(x)
|
1358 |
+
|
1359 |
+
|
1360 |
+
def _simrank_similarity_python(
|
1361 |
+
G,
|
1362 |
+
source=None,
|
1363 |
+
target=None,
|
1364 |
+
importance_factor=0.9,
|
1365 |
+
max_iterations=1000,
|
1366 |
+
tolerance=1e-4,
|
1367 |
+
):
|
1368 |
+
"""Returns the SimRank similarity of nodes in the graph ``G``.
|
1369 |
+
|
1370 |
+
This pure Python version is provided for pedagogical purposes.
|
1371 |
+
|
1372 |
+
Examples
|
1373 |
+
--------
|
1374 |
+
>>> G = nx.cycle_graph(2)
|
1375 |
+
>>> nx.similarity._simrank_similarity_python(G)
|
1376 |
+
{0: {0: 1, 1: 0.0}, 1: {0: 0.0, 1: 1}}
|
1377 |
+
>>> nx.similarity._simrank_similarity_python(G, source=0)
|
1378 |
+
{0: 1, 1: 0.0}
|
1379 |
+
>>> nx.similarity._simrank_similarity_python(G, source=0, target=0)
|
1380 |
+
1
|
1381 |
+
"""
|
1382 |
+
# build up our similarity adjacency dictionary output
|
1383 |
+
newsim = {u: {v: 1 if u == v else 0 for v in G} for u in G}
|
1384 |
+
|
1385 |
+
# These functions compute the update to the similarity value of the nodes
|
1386 |
+
# `u` and `v` with respect to the previous similarity values.
|
1387 |
+
def avg_sim(s):
|
1388 |
+
return sum(newsim[w][x] for (w, x) in s) / len(s) if s else 0.0
|
1389 |
+
|
1390 |
+
Gadj = G.pred if G.is_directed() else G.adj
|
1391 |
+
|
1392 |
+
def sim(u, v):
|
1393 |
+
return importance_factor * avg_sim(list(product(Gadj[u], Gadj[v])))
|
1394 |
+
|
1395 |
+
for its in range(max_iterations):
|
1396 |
+
oldsim = newsim
|
1397 |
+
newsim = {u: {v: sim(u, v) if u != v else 1 for v in G} for u in G}
|
1398 |
+
is_close = all(
|
1399 |
+
all(
|
1400 |
+
abs(newsim[u][v] - old) <= tolerance * (1 + abs(old))
|
1401 |
+
for v, old in nbrs.items()
|
1402 |
+
)
|
1403 |
+
for u, nbrs in oldsim.items()
|
1404 |
+
)
|
1405 |
+
if is_close:
|
1406 |
+
break
|
1407 |
+
|
1408 |
+
if its + 1 == max_iterations:
|
1409 |
+
raise nx.ExceededMaxIterations(
|
1410 |
+
f"simrank did not converge after {max_iterations} iterations."
|
1411 |
+
)
|
1412 |
+
|
1413 |
+
if source is not None and target is not None:
|
1414 |
+
return newsim[source][target]
|
1415 |
+
if source is not None:
|
1416 |
+
return newsim[source]
|
1417 |
+
return newsim
|
1418 |
+
|
1419 |
+
|
1420 |
+
def _simrank_similarity_numpy(
|
1421 |
+
G,
|
1422 |
+
source=None,
|
1423 |
+
target=None,
|
1424 |
+
importance_factor=0.9,
|
1425 |
+
max_iterations=1000,
|
1426 |
+
tolerance=1e-4,
|
1427 |
+
):
|
1428 |
+
"""Calculate SimRank of nodes in ``G`` using matrices with ``numpy``.
|
1429 |
+
|
1430 |
+
The SimRank algorithm for determining node similarity is defined in
|
1431 |
+
[1]_.
|
1432 |
+
|
1433 |
+
Parameters
|
1434 |
+
----------
|
1435 |
+
G : NetworkX graph
|
1436 |
+
A NetworkX graph
|
1437 |
+
|
1438 |
+
source : node
|
1439 |
+
If this is specified, the returned dictionary maps each node
|
1440 |
+
``v`` in the graph to the similarity between ``source`` and
|
1441 |
+
``v``.
|
1442 |
+
|
1443 |
+
target : node
|
1444 |
+
If both ``source`` and ``target`` are specified, the similarity
|
1445 |
+
value between ``source`` and ``target`` is returned. If
|
1446 |
+
``target`` is specified but ``source`` is not, this argument is
|
1447 |
+
ignored.
|
1448 |
+
|
1449 |
+
importance_factor : float
|
1450 |
+
The relative importance of indirect neighbors with respect to
|
1451 |
+
direct neighbors.
|
1452 |
+
|
1453 |
+
max_iterations : integer
|
1454 |
+
Maximum number of iterations.
|
1455 |
+
|
1456 |
+
tolerance : float
|
1457 |
+
Error tolerance used to check convergence. When an iteration of
|
1458 |
+
the algorithm finds that no similarity value changes more than
|
1459 |
+
this amount, the algorithm halts.
|
1460 |
+
|
1461 |
+
Returns
|
1462 |
+
-------
|
1463 |
+
similarity : numpy array or float
|
1464 |
+
If ``source`` and ``target`` are both ``None``, this returns a
|
1465 |
+
2D array containing SimRank scores of the nodes.
|
1466 |
+
|
1467 |
+
If ``source`` is not ``None`` but ``target`` is, this returns an
|
1468 |
+
1D array containing SimRank scores of ``source`` and that
|
1469 |
+
node.
|
1470 |
+
|
1471 |
+
If neither ``source`` nor ``target`` is ``None``, this returns
|
1472 |
+
the similarity value for the given pair of nodes.
|
1473 |
+
|
1474 |
+
Examples
|
1475 |
+
--------
|
1476 |
+
>>> G = nx.cycle_graph(2)
|
1477 |
+
>>> nx.similarity._simrank_similarity_numpy(G)
|
1478 |
+
array([[1., 0.],
|
1479 |
+
[0., 1.]])
|
1480 |
+
>>> nx.similarity._simrank_similarity_numpy(G, source=0)
|
1481 |
+
array([1., 0.])
|
1482 |
+
>>> nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
|
1483 |
+
1.0
|
1484 |
+
|
1485 |
+
References
|
1486 |
+
----------
|
1487 |
+
.. [1] G. Jeh and J. Widom.
|
1488 |
+
"SimRank: a measure of structural-context similarity",
|
1489 |
+
In KDD'02: Proceedings of the Eighth ACM SIGKDD
|
1490 |
+
International Conference on Knowledge Discovery and Data Mining,
|
1491 |
+
pp. 538--543. ACM Press, 2002.
|
1492 |
+
"""
|
1493 |
+
    # This algorithm follows roughly
    #
    #     S = max{C * (A.T * S * A), I}
    #
    # where C is the importance factor, A is the column normalized
    # adjacency matrix, and I is the identity matrix.
    import numpy as np

    adjacency_matrix = nx.to_numpy_array(G)

    # column-normalize the ``adjacency_matrix``
    s = np.array(adjacency_matrix.sum(axis=0))
    s[s == 0] = 1
    adjacency_matrix /= s  # adjacency_matrix.sum(axis=0)

    newsim = np.eye(len(G), dtype=np.float64)
    for its in range(max_iterations):
        prevsim = newsim.copy()
        newsim = importance_factor * ((adjacency_matrix.T @ prevsim) @ adjacency_matrix)
        np.fill_diagonal(newsim, 1.0)

        if np.allclose(prevsim, newsim, atol=tolerance):
            break

    if its + 1 == max_iterations:
        raise nx.ExceededMaxIterations(
            f"simrank did not converge after {max_iterations} iterations."
        )

    if source is not None and target is not None:
        return float(newsim[source, target])
    if source is not None:
        return newsim[source]
    return newsim
|
1527 |
+
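# --- Illustrative sketch, not part of the library ----------------------------
# Minimal check of the matrix formulation above on the 2-node cycle used in
# the docstrings: the column-normalized adjacency matrix is [[0, 1], [1, 0]]
# and the fixed point is the identity matrix (each node is similar only to
# itself).  The helper name is hypothetical.
def _example_simrank_matrix_form():
    import numpy as np

    G = nx.cycle_graph(2)
    S = _simrank_similarity_numpy(G)
    return np.allclose(S, np.eye(2))  # True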
|
1528 |
+
|
1529 |
+
@nx._dispatchable(edge_attrs="weight")
|
1530 |
+
def panther_similarity(
|
1531 |
+
G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None, weight="weight"
|
1532 |
+
):
|
1533 |
+
r"""Returns the Panther similarity of nodes in the graph `G` to node ``v``.
|
1534 |
+
|
1535 |
+
Panther is a similarity metric that says "two objects are considered
|
1536 |
+
to be similar if they frequently appear on the same paths." [1]_.
|
1537 |
+
|
1538 |
+
Parameters
|
1539 |
+
----------
|
1540 |
+
G : NetworkX graph
|
1541 |
+
A NetworkX graph
|
1542 |
+
source : node
|
1543 |
+
Source node for which to find the top `k` similar other nodes
|
1544 |
+
k : int (default = 5)
|
1545 |
+
The number of most similar nodes to return.
|
1546 |
+
path_length : int (default = 5)
|
1547 |
+
How long the randomly generated paths should be (``T`` in [1]_)
|
1548 |
+
c : float (default = 0.5)
|
1549 |
+
A universal positive constant used to scale the number
|
1550 |
+
of sample random paths to generate.
|
1551 |
+
delta : float (default = 0.1)
|
1552 |
+
        The probability that the similarity estimate $S$ fails to be an
        $\epsilon$-approximation of the true path-based similarity; that is,
        with probability at least $1 - \delta$ every estimate computed from
        the $R$ sampled random paths is within $\epsilon$ of its true value
        (see [1]_).
|
1555 |
+
eps : float or None (default = None)
|
1556 |
+
The error bound. Per [1]_, a good value is ``sqrt(1/|E|)``. Therefore,
|
1557 |
+
if no value is provided, the recommended computed value will be used.
|
1558 |
+
weight : string or None, optional (default="weight")
|
1559 |
+
The name of an edge attribute that holds the numerical value
|
1560 |
+
used as a weight. If None then each edge has weight 1.
|
1561 |
+
|
1562 |
+
Returns
|
1563 |
+
-------
|
1564 |
+
similarity : dictionary
|
1565 |
+
Dictionary of nodes to similarity scores (as floats). Note:
|
1566 |
+
the self-similarity (i.e., ``v``) will not be included in
|
1567 |
+
the returned dictionary. So, for ``k = 5``, a dictionary of
|
1568 |
+
top 4 nodes and their similarity scores will be returned.
|
1569 |
+
|
1570 |
+
Raises
|
1571 |
+
------
|
1572 |
+
NetworkXUnfeasible
|
1573 |
+
If `source` is an isolated node.
|
1574 |
+
|
1575 |
+
NodeNotFound
|
1576 |
+
If `source` is not in `G`.
|
1577 |
+
|
1578 |
+
Notes
|
1579 |
+
-----
|
1580 |
+
The isolated nodes in `G` are ignored.
|
1581 |
+
|
1582 |
+
Examples
|
1583 |
+
--------
|
1584 |
+
>>> G = nx.star_graph(10)
|
1585 |
+
>>> sim = nx.panther_similarity(G, 0)
|
1586 |
+
|
1587 |
+
References
|
1588 |
+
----------
|
1589 |
+
.. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J.
|
1590 |
+
Panther: Fast top-k similarity search on large networks.
|
1591 |
+
In Proceedings of the ACM SIGKDD International Conference
|
1592 |
+
on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454).
|
1593 |
+
Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267.
|
1594 |
+
"""
|
1595 |
+
import numpy as np
|
1596 |
+
|
1597 |
+
if source not in G:
|
1598 |
+
raise nx.NodeNotFound(f"Source node {source} not in G")
|
1599 |
+
|
1600 |
+
isolates = set(nx.isolates(G))
|
1601 |
+
|
1602 |
+
if source in isolates:
|
1603 |
+
raise nx.NetworkXUnfeasible(
|
1604 |
+
f"Panther similarity is not defined for the isolated source node {source}."
|
1605 |
+
)
|
1606 |
+
|
1607 |
+
G = G.subgraph([node for node in G.nodes if node not in isolates]).copy()
|
1608 |
+
|
1609 |
+
num_nodes = G.number_of_nodes()
|
1610 |
+
if num_nodes < k:
|
1611 |
+
warnings.warn(
|
1612 |
+
f"Number of nodes is {num_nodes}, but requested k is {k}. "
|
1613 |
+
"Setting k to number of nodes."
|
1614 |
+
)
|
1615 |
+
k = num_nodes
|
1616 |
+
# According to [1], they empirically determined
|
1617 |
+
# a good value for ``eps`` to be sqrt( 1 / |E| )
|
1618 |
+
if eps is None:
|
1619 |
+
eps = np.sqrt(1.0 / G.number_of_edges())
|
1620 |
+
|
1621 |
+
inv_node_map = {name: index for index, name in enumerate(G.nodes)}
|
1622 |
+
node_map = np.array(G)
|
1623 |
+
|
1624 |
+
# Calculate the sample size ``R`` for how many paths
|
1625 |
+
# to randomly generate
|
1626 |
+
t_choose_2 = math.comb(path_length, 2)
|
1627 |
+
sample_size = int((c / eps**2) * (np.log2(t_choose_2) + 1 + np.log(1 / delta)))
|
1628 |
+
index_map = {}
|
1629 |
+
_ = list(
|
1630 |
+
generate_random_paths(
|
1631 |
+
G, sample_size, path_length=path_length, index_map=index_map, weight=weight
|
1632 |
+
)
|
1633 |
+
)
|
1634 |
+
S = np.zeros(num_nodes)
|
1635 |
+
|
1636 |
+
inv_sample_size = 1 / sample_size
|
1637 |
+
|
1638 |
+
source_paths = set(index_map[source])
|
1639 |
+
|
1640 |
+
# Calculate the path similarities
|
1641 |
+
# between ``source`` (v) and ``node`` (v_j)
|
1642 |
+
# using our inverted index mapping of
|
1643 |
+
# vertices to paths
|
1644 |
+
for node, paths in index_map.items():
|
1645 |
+
# Only consider paths where both
|
1646 |
+
# ``node`` and ``source`` are present
|
1647 |
+
common_paths = source_paths.intersection(paths)
|
1648 |
+
S[inv_node_map[node]] = len(common_paths) * inv_sample_size
|
1649 |
+
|
1650 |
+
# Retrieve top ``k`` similar
|
1651 |
+
# Note: the below performed anywhere from 4-10x faster
|
1652 |
+
# (depending on input sizes) vs the equivalent ``np.argsort(S)[::-1]``
|
1653 |
+
top_k_unsorted = np.argpartition(S, -k)[-k:]
|
1654 |
+
top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1]
|
1655 |
+
|
1656 |
+
# Add back the similarity scores
|
1657 |
+
top_k_with_val = dict(
|
1658 |
+
zip(node_map[top_k_sorted].tolist(), S[top_k_sorted].tolist())
|
1659 |
+
)
|
1660 |
+
|
1661 |
+
# Remove the self-similarity
|
1662 |
+
top_k_with_val.pop(source, None)
|
1663 |
+
return top_k_with_val
|
1664 |
+
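# --- Illustrative sketch, not part of the library ----------------------------
# Worked example of the sample-size formula used above; the parameter values
# are arbitrary and chosen only to make the arithmetic concrete.  With
# T = path_length = 5, c = 0.5, delta = 0.1 and eps = 0.1:
#   C(5, 2) = 10, so R = (0.5 / 0.01) * (log2(10) + 1 + ln(10)) ~= 331.
def _example_panther_sample_size(path_length=5, c=0.5, delta=0.1, eps=0.1):
    import numpy as np

    t_choose_2 = math.comb(path_length, 2)
    return int((c / eps**2) * (np.log2(t_choose_2) + 1 + np.log(1 / delta)))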
|
1665 |
+
|
1666 |
+
@np_random_state(5)
|
1667 |
+
@nx._dispatchable(edge_attrs="weight")
|
1668 |
+
def generate_random_paths(
|
1669 |
+
G, sample_size, path_length=5, index_map=None, weight="weight", seed=None
|
1670 |
+
):
|
1671 |
+
"""Randomly generate `sample_size` paths of length `path_length`.
|
1672 |
+
|
1673 |
+
Parameters
|
1674 |
+
----------
|
1675 |
+
G : NetworkX graph
|
1676 |
+
A NetworkX graph
|
1677 |
+
sample_size : integer
|
1678 |
+
The number of paths to generate. This is ``R`` in [1]_.
|
1679 |
+
path_length : integer (default = 5)
|
1680 |
+
The maximum size of the path to randomly generate.
|
1681 |
+
This is ``T`` in [1]_. According to the paper, ``T >= 5`` is
|
1682 |
+
recommended.
|
1683 |
+
index_map : dictionary, optional
|
1684 |
+
If provided, this will be populated with the inverted
|
1685 |
+
index of nodes mapped to the set of generated random path
|
1686 |
+
indices within ``paths``.
|
1687 |
+
weight : string or None, optional (default="weight")
|
1688 |
+
The name of an edge attribute that holds the numerical value
|
1689 |
+
used as a weight. If None then each edge has weight 1.
|
1690 |
+
seed : integer, random_state, or None (default)
|
1691 |
+
Indicator of random number generation state.
|
1692 |
+
See :ref:`Randomness<randomness>`.
|
1693 |
+
|
1694 |
+
Returns
|
1695 |
+
-------
|
1696 |
+
paths : generator of lists
|
1697 |
+
Generator of `sample_size` paths each with length `path_length`.
|
1698 |
+
|
1699 |
+
Examples
|
1700 |
+
--------
|
1701 |
+
Note that the return value is the list of paths:
|
1702 |
+
|
1703 |
+
>>> G = nx.star_graph(3)
|
1704 |
+
>>> random_path = nx.generate_random_paths(G, 2)
|
1705 |
+
|
1706 |
+
By passing a dictionary into `index_map`, it will build an
|
1707 |
+
inverted index mapping of nodes to the paths in which that node is present:
|
1708 |
+
|
1709 |
+
>>> G = nx.star_graph(3)
|
1710 |
+
>>> index_map = {}
|
1711 |
+
>>> random_path = nx.generate_random_paths(G, 3, index_map=index_map)
|
1712 |
+
>>> paths_containing_node_0 = [
|
1713 |
+
... random_path[path_idx] for path_idx in index_map.get(0, [])
|
1714 |
+
... ]
|
1715 |
+
|
1716 |
+
References
|
1717 |
+
----------
|
1718 |
+
.. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J.
|
1719 |
+
Panther: Fast top-k similarity search on large networks.
|
1720 |
+
In Proceedings of the ACM SIGKDD International Conference
|
1721 |
+
on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454).
|
1722 |
+
Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267.
|
1723 |
+
"""
|
1724 |
+
import numpy as np
|
1725 |
+
|
1726 |
+
randint_fn = (
|
1727 |
+
seed.integers if isinstance(seed, np.random.Generator) else seed.randint
|
1728 |
+
)
|
1729 |
+
|
1730 |
+
# Calculate transition probabilities between
|
1731 |
+
# every pair of vertices according to Eq. (3)
|
1732 |
+
adj_mat = nx.to_numpy_array(G, weight=weight)
|
1733 |
+
inv_row_sums = np.reciprocal(adj_mat.sum(axis=1)).reshape(-1, 1)
|
1734 |
+
transition_probabilities = adj_mat * inv_row_sums
|
1735 |
+
|
1736 |
+
node_map = list(G)
|
1737 |
+
num_nodes = G.number_of_nodes()
|
1738 |
+
|
1739 |
+
for path_index in range(sample_size):
|
1740 |
+
# Sample current vertex v = v_i uniformly at random
|
1741 |
+
node_index = randint_fn(num_nodes)
|
1742 |
+
node = node_map[node_index]
|
1743 |
+
|
1744 |
+
# Add v into p_r and add p_r into the path set
|
1745 |
+
# of v, i.e., P_v
|
1746 |
+
path = [node]
|
1747 |
+
|
1748 |
+
# Build the inverted index (P_v) of vertices to paths
|
1749 |
+
if index_map is not None:
|
1750 |
+
if node in index_map:
|
1751 |
+
index_map[node].add(path_index)
|
1752 |
+
else:
|
1753 |
+
index_map[node] = {path_index}
|
1754 |
+
|
1755 |
+
starting_index = node_index
|
1756 |
+
for _ in range(path_length):
|
1757 |
+
# Randomly sample a neighbor (v_j) according
|
1758 |
+
# to transition probabilities from ``node`` (v) to its neighbors
|
1759 |
+
nbr_index = seed.choice(
|
1760 |
+
num_nodes, p=transition_probabilities[starting_index]
|
1761 |
+
)
|
1762 |
+
|
1763 |
+
# Set current vertex (v = v_j)
|
1764 |
+
starting_index = nbr_index
|
1765 |
+
|
1766 |
+
# Add v into p_r
|
1767 |
+
nbr_node = node_map[nbr_index]
|
1768 |
+
path.append(nbr_node)
|
1769 |
+
|
1770 |
+
# Add p_r into P_v
|
1771 |
+
if index_map is not None:
|
1772 |
+
if nbr_node in index_map:
|
1773 |
+
index_map[nbr_node].add(path_index)
|
1774 |
+
else:
|
1775 |
+
index_map[nbr_node] = {path_index}
|
1776 |
+
|
1777 |
+
yield path
|
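# --- Illustrative sketch, not part of the library ----------------------------
# The random walk above is driven by row-normalized edge weights.  The helper
# below (hypothetical, for illustration only) shows that normalization in
# isolation: each row of the returned matrix sums to 1 for non-isolated nodes.
def _example_transition_matrix(G, weight="weight"):
    import numpy as np

    adj = nx.to_numpy_array(G, weight=weight)
    row_sums = adj.sum(axis=1, keepdims=True)
    row_sums[row_sums == 0] = 1  # avoid division by zero for isolated nodes
    return adj / row_sums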
venv/lib/python3.10/site-packages/networkx/algorithms/simple_paths.py
ADDED
@@ -0,0 +1,937 @@
1 |
+
from heapq import heappop, heappush
|
2 |
+
from itertools import count
|
3 |
+
|
4 |
+
import networkx as nx
|
5 |
+
from networkx.algorithms.shortest_paths.weighted import _weight_function
|
6 |
+
from networkx.utils import not_implemented_for, pairwise
|
7 |
+
|
8 |
+
__all__ = [
|
9 |
+
"all_simple_paths",
|
10 |
+
"is_simple_path",
|
11 |
+
"shortest_simple_paths",
|
12 |
+
"all_simple_edge_paths",
|
13 |
+
]
|
14 |
+
|
15 |
+
|
16 |
+
@nx._dispatchable
|
17 |
+
def is_simple_path(G, nodes):
|
18 |
+
"""Returns True if and only if `nodes` form a simple path in `G`.
|
19 |
+
|
20 |
+
A *simple path* in a graph is a nonempty sequence of nodes in which
|
21 |
+
no node appears more than once in the sequence, and each adjacent
|
22 |
+
pair of nodes in the sequence is adjacent in the graph.
|
23 |
+
|
24 |
+
Parameters
|
25 |
+
----------
|
26 |
+
G : graph
|
27 |
+
A NetworkX graph.
|
28 |
+
nodes : list
|
29 |
+
A list of one or more nodes in the graph `G`.
|
30 |
+
|
31 |
+
Returns
|
32 |
+
-------
|
33 |
+
bool
|
34 |
+
Whether the given list of nodes represents a simple path in `G`.
|
35 |
+
|
36 |
+
Notes
|
37 |
+
-----
|
38 |
+
An empty list of nodes is not a path but a list of one node is a
|
39 |
+
path. Here's an explanation why.
|
40 |
+
|
41 |
+
This function operates on *node paths*. One could also consider
|
42 |
+
*edge paths*. There is a bijection between node paths and edge
|
43 |
+
paths.
|
44 |
+
|
45 |
+
The *length of a path* is the number of edges in the path, so a list
|
46 |
+
of nodes of length *n* corresponds to a path of length *n* - 1.
|
47 |
+
Thus the smallest edge path would be a list of zero edges, the empty
|
48 |
+
path. This corresponds to a list of one node.
|
49 |
+
|
50 |
+
To convert between a node path and an edge path, you can use code
|
51 |
+
like the following::
|
52 |
+
|
53 |
+
>>> from networkx.utils import pairwise
|
54 |
+
>>> nodes = [0, 1, 2, 3]
|
55 |
+
>>> edges = list(pairwise(nodes))
|
56 |
+
>>> edges
|
57 |
+
[(0, 1), (1, 2), (2, 3)]
|
58 |
+
>>> nodes = [edges[0][0]] + [v for u, v in edges]
|
59 |
+
>>> nodes
|
60 |
+
[0, 1, 2, 3]
|
61 |
+
|
62 |
+
Examples
|
63 |
+
--------
|
64 |
+
>>> G = nx.cycle_graph(4)
|
65 |
+
>>> nx.is_simple_path(G, [2, 3, 0])
|
66 |
+
True
|
67 |
+
>>> nx.is_simple_path(G, [0, 2])
|
68 |
+
False
|
69 |
+
|
70 |
+
"""
|
71 |
+
# The empty list is not a valid path. Could also return
|
72 |
+
# NetworkXPointlessConcept here.
|
73 |
+
if len(nodes) == 0:
|
74 |
+
return False
|
75 |
+
|
76 |
+
# If the list is a single node, just check that the node is actually
|
77 |
+
# in the graph.
|
78 |
+
if len(nodes) == 1:
|
79 |
+
return nodes[0] in G
|
80 |
+
|
81 |
+
# check that all nodes in the list are in the graph, if at least one
|
82 |
+
# is not in the graph, then this is not a simple path
|
83 |
+
if not all(n in G for n in nodes):
|
84 |
+
return False
|
85 |
+
|
86 |
+
# If the list contains repeated nodes, then it's not a simple path
|
87 |
+
if len(set(nodes)) != len(nodes):
|
88 |
+
return False
|
89 |
+
|
90 |
+
# Test that each adjacent pair of nodes is adjacent.
|
91 |
+
return all(v in G[u] for u, v in pairwise(nodes))
|
92 |
+
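# --- Illustrative sketch, not part of the library ----------------------------
# A hedged example tying the node-path / edge-path bijection described in the
# docstring above to this function: rebuild the node path from an edge path
# and validate it.  The helper name is hypothetical.
def _example_validate_edge_path(G, edge_path):
    nodes = [edge_path[0][0]] + [v for u, v in edge_path]
    return is_simple_path(G, nodes)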
|
93 |
+
|
94 |
+
@nx._dispatchable
|
95 |
+
def all_simple_paths(G, source, target, cutoff=None):
|
96 |
+
"""Generate all simple paths in the graph G from source to target.
|
97 |
+
|
98 |
+
A simple path is a path with no repeated nodes.
|
99 |
+
|
100 |
+
Parameters
|
101 |
+
----------
|
102 |
+
G : NetworkX graph
|
103 |
+
|
104 |
+
source : node
|
105 |
+
Starting node for path
|
106 |
+
|
107 |
+
target : nodes
|
108 |
+
Single node or iterable of nodes at which to end path
|
109 |
+
|
110 |
+
cutoff : integer, optional
|
111 |
+
Depth to stop the search. Only paths of length <= cutoff are returned.
|
112 |
+
|
113 |
+
Returns
|
114 |
+
-------
|
115 |
+
path_generator: generator
|
116 |
+
A generator that produces lists of simple paths. If there are no paths
|
117 |
+
between the source and target within the given cutoff the generator
|
118 |
+
produces no output. If it is possible to traverse the same sequence of
|
119 |
+
nodes in multiple ways, namely through parallel edges, then it will be
|
120 |
+
returned multiple times (once for each viable edge combination).
|
121 |
+
|
122 |
+
Examples
|
123 |
+
--------
|
124 |
+
This iterator generates lists of nodes::
|
125 |
+
|
126 |
+
>>> G = nx.complete_graph(4)
|
127 |
+
>>> for path in nx.all_simple_paths(G, source=0, target=3):
|
128 |
+
... print(path)
|
129 |
+
...
|
130 |
+
[0, 1, 2, 3]
|
131 |
+
[0, 1, 3]
|
132 |
+
[0, 2, 1, 3]
|
133 |
+
[0, 2, 3]
|
134 |
+
[0, 3]
|
135 |
+
|
136 |
+
You can generate only those paths that are shorter than a certain
|
137 |
+
length by using the `cutoff` keyword argument::
|
138 |
+
|
139 |
+
>>> paths = nx.all_simple_paths(G, source=0, target=3, cutoff=2)
|
140 |
+
>>> print(list(paths))
|
141 |
+
[[0, 1, 3], [0, 2, 3], [0, 3]]
|
142 |
+
|
143 |
+
To get each path as the corresponding list of edges, you can use the
|
144 |
+
:func:`networkx.utils.pairwise` helper function::
|
145 |
+
|
146 |
+
>>> paths = nx.all_simple_paths(G, source=0, target=3)
|
147 |
+
>>> for path in map(nx.utils.pairwise, paths):
|
148 |
+
... print(list(path))
|
149 |
+
[(0, 1), (1, 2), (2, 3)]
|
150 |
+
[(0, 1), (1, 3)]
|
151 |
+
[(0, 2), (2, 1), (1, 3)]
|
152 |
+
[(0, 2), (2, 3)]
|
153 |
+
[(0, 3)]
|
154 |
+
|
155 |
+
Pass an iterable of nodes as target to generate all paths ending in any of several nodes::
|
156 |
+
|
157 |
+
>>> G = nx.complete_graph(4)
|
158 |
+
>>> for path in nx.all_simple_paths(G, source=0, target=[3, 2]):
|
159 |
+
... print(path)
|
160 |
+
...
|
161 |
+
[0, 1, 2]
|
162 |
+
[0, 1, 2, 3]
|
163 |
+
[0, 1, 3]
|
164 |
+
[0, 1, 3, 2]
|
165 |
+
[0, 2]
|
166 |
+
[0, 2, 1, 3]
|
167 |
+
[0, 2, 3]
|
168 |
+
[0, 3]
|
169 |
+
[0, 3, 1, 2]
|
170 |
+
[0, 3, 2]
|
171 |
+
|
172 |
+
The singleton path from ``source`` to itself is considered a simple path and is
|
173 |
+
included in the results:
|
174 |
+
|
175 |
+
>>> G = nx.empty_graph(5)
|
176 |
+
>>> list(nx.all_simple_paths(G, source=0, target=0))
|
177 |
+
[[0]]
|
178 |
+
|
179 |
+
>>> G = nx.path_graph(3)
|
180 |
+
>>> list(nx.all_simple_paths(G, source=0, target={0, 1, 2}))
|
181 |
+
[[0], [0, 1], [0, 1, 2]]
|
182 |
+
|
183 |
+
Iterate over each path from the root nodes to the leaf nodes in a
|
184 |
+
directed acyclic graph using a functional programming approach::
|
185 |
+
|
186 |
+
>>> from itertools import chain
|
187 |
+
>>> from itertools import product
|
188 |
+
>>> from itertools import starmap
|
189 |
+
>>> from functools import partial
|
190 |
+
>>>
|
191 |
+
>>> chaini = chain.from_iterable
|
192 |
+
>>>
|
193 |
+
>>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)])
|
194 |
+
>>> roots = (v for v, d in G.in_degree() if d == 0)
|
195 |
+
>>> leaves = (v for v, d in G.out_degree() if d == 0)
|
196 |
+
>>> all_paths = partial(nx.all_simple_paths, G)
|
197 |
+
>>> list(chaini(starmap(all_paths, product(roots, leaves))))
|
198 |
+
[[0, 1, 2], [0, 3, 2]]
|
199 |
+
|
200 |
+
The same list computed using an iterative approach::
|
201 |
+
|
202 |
+
>>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)])
|
203 |
+
>>> roots = (v for v, d in G.in_degree() if d == 0)
|
204 |
+
>>> leaves = (v for v, d in G.out_degree() if d == 0)
|
205 |
+
>>> all_paths = []
|
206 |
+
>>> for root in roots:
|
207 |
+
... for leaf in leaves:
|
208 |
+
... paths = nx.all_simple_paths(G, root, leaf)
|
209 |
+
... all_paths.extend(paths)
|
210 |
+
>>> all_paths
|
211 |
+
[[0, 1, 2], [0, 3, 2]]
|
212 |
+
|
213 |
+
Iterate over each path from the root nodes to the leaf nodes in a
|
214 |
+
directed acyclic graph passing all leaves together to avoid unnecessary
|
215 |
+
compute::
|
216 |
+
|
217 |
+
>>> G = nx.DiGraph([(0, 1), (2, 1), (1, 3), (1, 4)])
|
218 |
+
>>> roots = (v for v, d in G.in_degree() if d == 0)
|
219 |
+
>>> leaves = [v for v, d in G.out_degree() if d == 0]
|
220 |
+
>>> all_paths = []
|
221 |
+
>>> for root in roots:
|
222 |
+
... paths = nx.all_simple_paths(G, root, leaves)
|
223 |
+
... all_paths.extend(paths)
|
224 |
+
>>> all_paths
|
225 |
+
[[0, 1, 3], [0, 1, 4], [2, 1, 3], [2, 1, 4]]
|
226 |
+
|
227 |
+
If parallel edges offer multiple ways to traverse a given sequence of
|
228 |
+
nodes, this sequence of nodes will be returned multiple times:
|
229 |
+
|
230 |
+
>>> G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 2)])
|
231 |
+
>>> list(nx.all_simple_paths(G, 0, 2))
|
232 |
+
[[0, 1, 2], [0, 1, 2]]
|
233 |
+
|
234 |
+
Notes
|
235 |
+
-----
|
236 |
+
This algorithm uses a modified depth-first search to generate the
|
237 |
+
paths [1]_. A single path can be found in $O(V+E)$ time but the
|
238 |
+
number of simple paths in a graph can be very large, e.g. $O(n!)$ in
|
239 |
+
the complete graph of order $n$.
|
240 |
+
|
241 |
+
This function does not check that a path exists between `source` and
|
242 |
+
`target`. For large graphs, this may result in very long runtimes.
|
243 |
+
Consider using `has_path` to check that a path exists between `source` and
|
244 |
+
`target` before calling this function on large graphs.
|
245 |
+
|
246 |
+
References
|
247 |
+
----------
|
248 |
+
.. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
|
249 |
+
Addison Wesley Professional, 3rd ed., 2001.
|
250 |
+
|
251 |
+
See Also
|
252 |
+
--------
|
253 |
+
all_shortest_paths, shortest_path, has_path
|
254 |
+
|
255 |
+
"""
|
256 |
+
for edge_path in all_simple_edge_paths(G, source, target, cutoff):
|
257 |
+
yield [source] + [edge[1] for edge in edge_path]
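# Editor's sketch (not part of the library source): the wrapper above rebuilds a
# node path from an edge path by keeping the source and appending each edge's
# head node. The same reconstruction, done by hand on a small graph:
import networkx as nx

G = nx.complete_graph(4)
for edge_path in nx.all_simple_edge_paths(G, 0, 3, cutoff=2):
    node_path = [0] + [v for _, v in edge_path]  # same rule as the yield above
    print(edge_path, "->", node_path)
# e.g. [(0, 1), (1, 3)] -> [0, 1, 3]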
|
258 |
+
|
259 |
+
|
260 |
+
@nx._dispatchable
|
261 |
+
def all_simple_edge_paths(G, source, target, cutoff=None):
|
262 |
+
"""Generate lists of edges for all simple paths in G from source to target.
|
263 |
+
|
264 |
+
A simple path is a path with no repeated nodes.
|
265 |
+
|
266 |
+
Parameters
|
267 |
+
----------
|
268 |
+
G : NetworkX graph
|
269 |
+
|
270 |
+
source : node
|
271 |
+
Starting node for path
|
272 |
+
|
273 |
+
target : nodes
|
274 |
+
Single node or iterable of nodes at which to end path
|
275 |
+
|
276 |
+
cutoff : integer, optional
|
277 |
+
Depth to stop the search. Only paths of length <= cutoff are returned.
|
278 |
+
|
279 |
+
Returns
|
280 |
+
-------
|
281 |
+
path_generator: generator
|
282 |
+
A generator that produces lists of simple paths. If there are no paths
|
283 |
+
between the source and target within the given cutoff the generator
|
284 |
+
produces no output.
|
285 |
+
For multigraphs, each list of edges has elements of the form ``(u, v, k)``,
where ``k`` corresponds to the edge key.
|
287 |
+
|
288 |
+
Examples
|
289 |
+
--------
|
290 |
+
|
291 |
+
Print the simple path edges of a Graph::
|
292 |
+
|
293 |
+
>>> g = nx.Graph([(1, 2), (2, 4), (1, 3), (3, 4)])
|
294 |
+
>>> for path in sorted(nx.all_simple_edge_paths(g, 1, 4)):
|
295 |
+
... print(path)
|
296 |
+
[(1, 2), (2, 4)]
|
297 |
+
[(1, 3), (3, 4)]
|
298 |
+
|
299 |
+
Print the simple path edges of a MultiGraph. Returned edges come with
|
300 |
+
their associated keys::
|
301 |
+
|
302 |
+
>>> mg = nx.MultiGraph()
|
303 |
+
>>> mg.add_edge(1, 2, key="k0")
|
304 |
+
'k0'
|
305 |
+
>>> mg.add_edge(1, 2, key="k1")
|
306 |
+
'k1'
|
307 |
+
>>> mg.add_edge(2, 3, key="k0")
|
308 |
+
'k0'
|
309 |
+
>>> for path in sorted(nx.all_simple_edge_paths(mg, 1, 3)):
|
310 |
+
... print(path)
|
311 |
+
[(1, 2, 'k0'), (2, 3, 'k0')]
|
312 |
+
[(1, 2, 'k1'), (2, 3, 'k0')]
|
313 |
+
|
314 |
+
When ``source`` is one of the targets, the empty path starting and ending at
|
315 |
+
``source`` without traversing any edge is considered a valid simple edge path
|
316 |
+
and is included in the results:
|
317 |
+
|
318 |
+
>>> G = nx.Graph()
|
319 |
+
>>> G.add_node(0)
|
320 |
+
>>> paths = list(nx.all_simple_edge_paths(G, 0, 0))
|
321 |
+
>>> for path in paths:
|
322 |
+
... print(path)
|
323 |
+
[]
|
324 |
+
>>> len(paths)
|
325 |
+
1
|
326 |
+
|
327 |
+
|
328 |
+
Notes
|
329 |
+
-----
|
330 |
+
This algorithm uses a modified depth-first search to generate the
|
331 |
+
paths [1]_. A single path can be found in $O(V+E)$ time but the
|
332 |
+
number of simple paths in a graph can be very large, e.g. $O(n!)$ in
|
333 |
+
the complete graph of order $n$.
|
334 |
+
|
335 |
+
References
|
336 |
+
----------
|
337 |
+
.. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
|
338 |
+
Addison Wesley Professional, 3rd ed., 2001.
|
339 |
+
|
340 |
+
See Also
|
341 |
+
--------
|
342 |
+
all_shortest_paths, shortest_path, all_simple_paths
|
343 |
+
|
344 |
+
"""
|
345 |
+
if source not in G:
|
346 |
+
raise nx.NodeNotFound(f"source node {source} not in graph")
|
347 |
+
|
348 |
+
if target in G:
|
349 |
+
targets = {target}
|
350 |
+
else:
|
351 |
+
try:
|
352 |
+
targets = set(target)
|
353 |
+
except TypeError as err:
|
354 |
+
raise nx.NodeNotFound(f"target node {target} not in graph") from err
|
355 |
+
|
356 |
+
cutoff = cutoff if cutoff is not None else len(G) - 1
|
357 |
+
|
358 |
+
if cutoff >= 0 and targets:
|
359 |
+
yield from _all_simple_edge_paths(G, source, targets, cutoff)
|
360 |
+
|
361 |
+
|
362 |
+
def _all_simple_edge_paths(G, source, targets, cutoff):
|
363 |
+
# We simulate recursion with a stack, keeping the current path being explored
|
364 |
+
# and the outgoing edge iterators at each point in the stack.
|
365 |
+
# To avoid unnecessary checks, the loop is structured in a way such that a path
|
366 |
+
# is considered for yielding only after a new node/edge is added.
|
367 |
+
# We bootstrap the search by adding a dummy iterator to the stack that only yields
|
368 |
+
# a dummy edge to source (so that the trivial path has a chance of being included).
|
369 |
+
|
370 |
+
get_edges = (
|
371 |
+
(lambda node: G.edges(node, keys=True))
|
372 |
+
if G.is_multigraph()
|
373 |
+
else (lambda node: G.edges(node))
|
374 |
+
)
|
375 |
+
|
376 |
+
# The current_path is a dictionary that maps nodes in the path to the edge that was
|
377 |
+
# used to enter that node (instead of a list of edges) because we want both a fast
|
378 |
+
# membership test for nodes in the path and the preservation of insertion order.
|
379 |
+
current_path = {None: None}
|
380 |
+
stack = [iter([(None, source)])]
|
381 |
+
|
382 |
+
while stack:
|
383 |
+
# 1. Try to extend the current path.
|
384 |
+
next_edge = next((e for e in stack[-1] if e[1] not in current_path), None)
|
385 |
+
if next_edge is None:
|
386 |
+
# All edges of the last node in the current path have been explored.
|
387 |
+
stack.pop()
|
388 |
+
current_path.popitem()
|
389 |
+
continue
|
390 |
+
previous_node, next_node, *_ = next_edge
|
391 |
+
|
392 |
+
# 2. Check if we've reached a target.
|
393 |
+
if next_node in targets:
|
394 |
+
yield (list(current_path.values()) + [next_edge])[2:] # remove dummy edge
|
395 |
+
|
396 |
+
# 3. Only expand the search through the next node if it makes sense.
|
397 |
+
if len(current_path) - 1 < cutoff and (
|
398 |
+
targets - current_path.keys() - {next_node}
|
399 |
+
):
|
400 |
+
current_path[next_node] = next_edge
|
401 |
+
stack.append(iter(get_edges(next_node)))
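# Editor's sketch with hypothetical names (not part of the library source): the
# `current_path` dict above doubles as the DFS path and the visited set, because
# dicts preserve insertion order and give O(1) membership tests, and popitem()
# removes the most recently added entry (i.e. backtracks one step).
path = {None: None}          # sentinel entry, mirrors the dummy edge above
path["a"] = (None, "a")      # enter node "a"
path["b"] = ("a", "b")       # enter node "b" via edge ("a", "b")
assert "b" in path           # O(1) membership test
path.popitem()               # backtrack: drops the last node added ("b")
assert list(path) == [None, "a"]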
|
402 |
+
|
403 |
+
|
404 |
+
@not_implemented_for("multigraph")
|
405 |
+
@nx._dispatchable(edge_attrs="weight")
|
406 |
+
def shortest_simple_paths(G, source, target, weight=None):
|
407 |
+
"""Generate all simple paths in the graph G from source to target,
|
408 |
+
starting from shortest ones.
|
409 |
+
|
410 |
+
A simple path is a path with no repeated nodes.
|
411 |
+
|
412 |
+
If a weighted shortest path search is to be used, no negative weights
|
413 |
+
are allowed.
|
414 |
+
|
415 |
+
Parameters
|
416 |
+
----------
|
417 |
+
G : NetworkX graph
|
418 |
+
|
419 |
+
source : node
|
420 |
+
Starting node for path
|
421 |
+
|
422 |
+
target : node
|
423 |
+
Ending node for path
|
424 |
+
|
425 |
+
weight : string or function
|
426 |
+
If it is a string, it is the name of the edge attribute to be
|
427 |
+
used as a weight.
|
428 |
+
|
429 |
+
If it is a function, the weight of an edge is the value returned
|
430 |
+
by the function. The function must accept exactly three positional
|
431 |
+
arguments: the two endpoints of an edge and the dictionary of edge
|
432 |
+
attributes for that edge. The function must return a number.
|
433 |
+
|
434 |
+
If None all edges are considered to have unit weight. Default
|
435 |
+
value None.
|
436 |
+
|
437 |
+
Returns
|
438 |
+
-------
|
439 |
+
path_generator: generator
|
440 |
+
A generator that produces lists of simple paths, in order from
|
441 |
+
shortest to longest.
|
442 |
+
|
443 |
+
Raises
|
444 |
+
------
|
445 |
+
NetworkXNoPath
|
446 |
+
If no path exists between source and target.
|
447 |
+
|
448 |
+
NetworkXError
|
449 |
+
If source or target nodes are not in the input graph.
|
450 |
+
|
451 |
+
NetworkXNotImplemented
|
452 |
+
If the input graph is a Multi[Di]Graph.
|
453 |
+
|
454 |
+
Examples
|
455 |
+
--------
|
456 |
+
|
457 |
+
>>> G = nx.cycle_graph(7)
|
458 |
+
>>> paths = list(nx.shortest_simple_paths(G, 0, 3))
|
459 |
+
>>> print(paths)
|
460 |
+
[[0, 1, 2, 3], [0, 6, 5, 4, 3]]
|
461 |
+
|
462 |
+
You can use this function to efficiently compute the k shortest/best
|
463 |
+
paths between two nodes.
|
464 |
+
|
465 |
+
>>> from itertools import islice
|
466 |
+
>>> def k_shortest_paths(G, source, target, k, weight=None):
|
467 |
+
... return list(
|
468 |
+
... islice(nx.shortest_simple_paths(G, source, target, weight=weight), k)
|
469 |
+
... )
|
470 |
+
>>> for path in k_shortest_paths(G, 0, 3, 2):
|
471 |
+
... print(path)
|
472 |
+
[0, 1, 2, 3]
|
473 |
+
[0, 6, 5, 4, 3]
|
474 |
+
|
475 |
+
Notes
|
476 |
+
-----
|
477 |
+
This procedure is based on the algorithm by Jin Y. Yen [1]_. Finding
|
478 |
+
the first $K$ paths requires $O(KN^3)$ operations.
|
479 |
+
|
480 |
+
See Also
|
481 |
+
--------
|
482 |
+
all_shortest_paths
|
483 |
+
shortest_path
|
484 |
+
all_simple_paths
|
485 |
+
|
486 |
+
References
|
487 |
+
----------
|
488 |
+
.. [1] Jin Y. Yen, "Finding the K Shortest Loopless Paths in a
|
489 |
+
Network", Management Science, Vol. 17, No. 11, Theory Series
|
490 |
+
(Jul., 1971), pp. 712-716.
|
491 |
+
|
492 |
+
"""
|
493 |
+
if source not in G:
|
494 |
+
raise nx.NodeNotFound(f"source node {source} not in graph")
|
495 |
+
|
496 |
+
if target not in G:
|
497 |
+
raise nx.NodeNotFound(f"target node {target} not in graph")
|
498 |
+
|
499 |
+
if weight is None:
|
500 |
+
length_func = len
|
501 |
+
shortest_path_func = _bidirectional_shortest_path
|
502 |
+
else:
|
503 |
+
wt = _weight_function(G, weight)
|
504 |
+
|
505 |
+
def length_func(path):
|
506 |
+
return sum(
|
507 |
+
wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:])
|
508 |
+
)
|
509 |
+
|
510 |
+
shortest_path_func = _bidirectional_dijkstra
|
511 |
+
|
512 |
+
listA = []
|
513 |
+
listB = PathBuffer()
|
514 |
+
prev_path = None
|
515 |
+
while True:
|
516 |
+
if not prev_path:
|
517 |
+
length, path = shortest_path_func(G, source, target, weight=weight)
|
518 |
+
listB.push(length, path)
|
519 |
+
else:
|
520 |
+
ignore_nodes = set()
|
521 |
+
ignore_edges = set()
|
522 |
+
for i in range(1, len(prev_path)):
|
523 |
+
root = prev_path[:i]
|
524 |
+
root_length = length_func(root)
|
525 |
+
for path in listA:
|
526 |
+
if path[:i] == root:
|
527 |
+
ignore_edges.add((path[i - 1], path[i]))
|
528 |
+
try:
|
529 |
+
length, spur = shortest_path_func(
|
530 |
+
G,
|
531 |
+
root[-1],
|
532 |
+
target,
|
533 |
+
ignore_nodes=ignore_nodes,
|
534 |
+
ignore_edges=ignore_edges,
|
535 |
+
weight=weight,
|
536 |
+
)
|
537 |
+
path = root[:-1] + spur
|
538 |
+
listB.push(root_length + length, path)
|
539 |
+
except nx.NetworkXNoPath:
|
540 |
+
pass
|
541 |
+
ignore_nodes.add(root[-1])
|
542 |
+
|
543 |
+
if listB:
|
544 |
+
path = listB.pop()
|
545 |
+
yield path
|
546 |
+
listA.append(path)
|
547 |
+
prev_path = path
|
548 |
+
else:
|
549 |
+
break
|
550 |
+
|
551 |
+
|
552 |
+
class PathBuffer:
|
553 |
+
def __init__(self):
|
554 |
+
self.paths = set()
|
555 |
+
self.sortedpaths = []
|
556 |
+
self.counter = count()
|
557 |
+
|
558 |
+
def __len__(self):
|
559 |
+
return len(self.sortedpaths)
|
560 |
+
|
561 |
+
def push(self, cost, path):
|
562 |
+
hashable_path = tuple(path)
|
563 |
+
if hashable_path not in self.paths:
|
564 |
+
heappush(self.sortedpaths, (cost, next(self.counter), path))
|
565 |
+
self.paths.add(hashable_path)
|
566 |
+
|
567 |
+
def pop(self):
|
568 |
+
(cost, num, path) = heappop(self.sortedpaths)
|
569 |
+
hashable_path = tuple(path)
|
570 |
+
self.paths.remove(hashable_path)
|
571 |
+
return path
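# Editor's usage sketch (not part of the library source; assumes the PathBuffer
# class above is in scope): candidate paths come back in order of increasing
# cost, the counter breaks cost ties without comparing the path lists, and
# duplicate paths are silently dropped.
buf = PathBuffer()
buf.push(3.0, [0, 2, 3])
buf.push(1.0, [0, 3])
buf.push(1.0, [0, 3])   # duplicate of an already-buffered path, ignored
buf.push(2.0, [0, 1, 3])
print(len(buf))         # 3
print(buf.pop())        # [0, 3]      (cheapest first)
print(buf.pop())        # [0, 1, 3]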
|
572 |
+
|
573 |
+
|
574 |
+
def _bidirectional_shortest_path(
|
575 |
+
G, source, target, ignore_nodes=None, ignore_edges=None, weight=None
|
576 |
+
):
|
577 |
+
"""Returns the shortest path between source and target ignoring
|
578 |
+
nodes and edges in the containers ignore_nodes and ignore_edges.
|
579 |
+
|
580 |
+
This is a custom modification of the standard bidirectional shortest
|
581 |
+
path implementation at networkx.algorithms.unweighted
|
582 |
+
|
583 |
+
Parameters
|
584 |
+
----------
|
585 |
+
G : NetworkX graph
|
586 |
+
|
587 |
+
source : node
|
588 |
+
starting node for path
|
589 |
+
|
590 |
+
target : node
|
591 |
+
ending node for path
|
592 |
+
|
593 |
+
ignore_nodes : container of nodes
|
594 |
+
nodes to ignore, optional
|
595 |
+
|
596 |
+
ignore_edges : container of edges
|
597 |
+
edges to ignore, optional
|
598 |
+
|
599 |
+
weight : None
|
600 |
+
This function accepts a weight argument for convenience of
|
601 |
+
shortest_simple_paths function. It will be ignored.
|
602 |
+
|
603 |
+
Returns
|
604 |
+
-------
|
605 |
+
path: list
|
606 |
+
List of nodes in a path from source to target.
|
607 |
+
|
608 |
+
Raises
|
609 |
+
------
|
610 |
+
NetworkXNoPath
|
611 |
+
If no path exists between source and target.
|
612 |
+
|
613 |
+
See Also
|
614 |
+
--------
|
615 |
+
shortest_path
|
616 |
+
|
617 |
+
"""
|
618 |
+
# call helper to do the real work
|
619 |
+
results = _bidirectional_pred_succ(G, source, target, ignore_nodes, ignore_edges)
|
620 |
+
pred, succ, w = results
|
621 |
+
|
622 |
+
# build path from pred+w+succ
|
623 |
+
path = []
|
624 |
+
# from w to target
|
625 |
+
while w is not None:
|
626 |
+
path.append(w)
|
627 |
+
w = succ[w]
|
628 |
+
# from source to w
|
629 |
+
w = pred[path[0]]
|
630 |
+
while w is not None:
|
631 |
+
path.insert(0, w)
|
632 |
+
w = pred[w]
|
633 |
+
|
634 |
+
return len(path), path
|
635 |
+
|
636 |
+
|
637 |
+
def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges=None):
|
638 |
+
"""Bidirectional shortest path helper.
|
639 |
+
Returns (pred,succ,w) where
|
640 |
+
pred is a dictionary of predecessors from w to the source, and
|
641 |
+
succ is a dictionary of successors from w to the target.
|
642 |
+
"""
|
643 |
+
# does BFS from both source and target and meets in the middle
|
644 |
+
if ignore_nodes and (source in ignore_nodes or target in ignore_nodes):
|
645 |
+
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
|
646 |
+
if target == source:
|
647 |
+
return ({target: None}, {source: None}, source)
|
648 |
+
|
649 |
+
# handle either directed or undirected
|
650 |
+
if G.is_directed():
|
651 |
+
Gpred = G.predecessors
|
652 |
+
Gsucc = G.successors
|
653 |
+
else:
|
654 |
+
Gpred = G.neighbors
|
655 |
+
Gsucc = G.neighbors
|
656 |
+
|
657 |
+
# support optional nodes filter
|
658 |
+
if ignore_nodes:
|
659 |
+
|
660 |
+
def filter_iter(nodes):
|
661 |
+
def iterate(v):
|
662 |
+
for w in nodes(v):
|
663 |
+
if w not in ignore_nodes:
|
664 |
+
yield w
|
665 |
+
|
666 |
+
return iterate
|
667 |
+
|
668 |
+
Gpred = filter_iter(Gpred)
|
669 |
+
Gsucc = filter_iter(Gsucc)
|
670 |
+
|
671 |
+
# support optional edges filter
|
672 |
+
if ignore_edges:
|
673 |
+
if G.is_directed():
|
674 |
+
|
675 |
+
def filter_pred_iter(pred_iter):
|
676 |
+
def iterate(v):
|
677 |
+
for w in pred_iter(v):
|
678 |
+
if (w, v) not in ignore_edges:
|
679 |
+
yield w
|
680 |
+
|
681 |
+
return iterate
|
682 |
+
|
683 |
+
def filter_succ_iter(succ_iter):
|
684 |
+
def iterate(v):
|
685 |
+
for w in succ_iter(v):
|
686 |
+
if (v, w) not in ignore_edges:
|
687 |
+
yield w
|
688 |
+
|
689 |
+
return iterate
|
690 |
+
|
691 |
+
Gpred = filter_pred_iter(Gpred)
|
692 |
+
Gsucc = filter_succ_iter(Gsucc)
|
693 |
+
|
694 |
+
else:
|
695 |
+
|
696 |
+
def filter_iter(nodes):
|
697 |
+
def iterate(v):
|
698 |
+
for w in nodes(v):
|
699 |
+
if (v, w) not in ignore_edges and (w, v) not in ignore_edges:
|
700 |
+
yield w
|
701 |
+
|
702 |
+
return iterate
|
703 |
+
|
704 |
+
Gpred = filter_iter(Gpred)
|
705 |
+
Gsucc = filter_iter(Gsucc)
|
706 |
+
|
707 |
+
# predecessor and successors in search
|
708 |
+
pred = {source: None}
|
709 |
+
succ = {target: None}
|
710 |
+
|
711 |
+
# initialize fringes, start with forward
|
712 |
+
forward_fringe = [source]
|
713 |
+
reverse_fringe = [target]
|
714 |
+
|
715 |
+
while forward_fringe and reverse_fringe:
|
716 |
+
if len(forward_fringe) <= len(reverse_fringe):
|
717 |
+
this_level = forward_fringe
|
718 |
+
forward_fringe = []
|
719 |
+
for v in this_level:
|
720 |
+
for w in Gsucc(v):
|
721 |
+
if w not in pred:
|
722 |
+
forward_fringe.append(w)
|
723 |
+
pred[w] = v
|
724 |
+
if w in succ:
|
725 |
+
# found path
|
726 |
+
return pred, succ, w
|
727 |
+
else:
|
728 |
+
this_level = reverse_fringe
|
729 |
+
reverse_fringe = []
|
730 |
+
for v in this_level:
|
731 |
+
for w in Gpred(v):
|
732 |
+
if w not in succ:
|
733 |
+
succ[w] = v
|
734 |
+
reverse_fringe.append(w)
|
735 |
+
if w in pred:
|
736 |
+
# found path
|
737 |
+
return pred, succ, w
|
738 |
+
|
739 |
+
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
|
740 |
+
|
741 |
+
|
742 |
+
def _bidirectional_dijkstra(
|
743 |
+
G, source, target, weight="weight", ignore_nodes=None, ignore_edges=None
|
744 |
+
):
|
745 |
+
"""Dijkstra's algorithm for shortest paths using bidirectional search.
|
746 |
+
|
747 |
+
This function returns the shortest path between source and target
|
748 |
+
ignoring nodes and edges in the containers ignore_nodes and
|
749 |
+
ignore_edges.
|
750 |
+
|
751 |
+
This is a custom modification of the standard Dijkstra bidirectional
|
752 |
+
shortest path implementation at networkx.algorithms.weighted
|
753 |
+
|
754 |
+
Parameters
|
755 |
+
----------
|
756 |
+
G : NetworkX graph
|
757 |
+
|
758 |
+
source : node
|
759 |
+
Starting node.
|
760 |
+
|
761 |
+
target : node
|
762 |
+
Ending node.
|
763 |
+
|
764 |
+
weight: string, function, optional (default='weight')
|
765 |
+
Edge data key or weight function corresponding to the edge weight
|
766 |
+
|
767 |
+
ignore_nodes : container of nodes
|
768 |
+
nodes to ignore, optional
|
769 |
+
|
770 |
+
ignore_edges : container of edges
|
771 |
+
edges to ignore, optional
|
772 |
+
|
773 |
+
Returns
|
774 |
+
-------
|
775 |
+
length : number
    Shortest path length.

path : list
    List of nodes in a shortest path from source to target.
|
781 |
+
|
782 |
+
Raises
|
783 |
+
------
|
784 |
+
NetworkXNoPath
|
785 |
+
If no path exists between source and target.
|
786 |
+
|
787 |
+
Notes
|
788 |
+
-----
|
789 |
+
Edge weight attributes must be numerical.
|
790 |
+
Distances are calculated as sums of weighted edges traversed.
|
791 |
+
|
792 |
+
In practice bidirectional Dijkstra is much more than twice as fast as
|
793 |
+
ordinary Dijkstra.
|
794 |
+
|
795 |
+
Ordinary Dijkstra expands nodes in a sphere-like manner from the
|
796 |
+
source. The radius of this sphere will eventually be the length
|
797 |
+
of the shortest path. Bidirectional Dijkstra will expand nodes
|
798 |
+
from both the source and the target, making two spheres of half
|
799 |
+
this radius. The volume of the first sphere is pi*r*r, while the two
smaller spheres together have volume 2*pi*(r/2)*(r/2), half as much.
|
801 |
+
|
802 |
+
This algorithm is not guaranteed to work if edge weights
|
803 |
+
are negative or are floating point numbers
|
804 |
+
(overflows and roundoff errors can cause problems).
|
805 |
+
|
806 |
+
See Also
|
807 |
+
--------
|
808 |
+
shortest_path
|
809 |
+
shortest_path_length
|
810 |
+
"""
|
811 |
+
if ignore_nodes and (source in ignore_nodes or target in ignore_nodes):
|
812 |
+
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
|
813 |
+
if source == target:
|
814 |
+
if source not in G:
|
815 |
+
raise nx.NodeNotFound(f"Node {source} not in graph")
|
816 |
+
return (0, [source])
|
817 |
+
|
818 |
+
# handle either directed or undirected
|
819 |
+
if G.is_directed():
|
820 |
+
Gpred = G.predecessors
|
821 |
+
Gsucc = G.successors
|
822 |
+
else:
|
823 |
+
Gpred = G.neighbors
|
824 |
+
Gsucc = G.neighbors
|
825 |
+
|
826 |
+
# support optional nodes filter
|
827 |
+
if ignore_nodes:
|
828 |
+
|
829 |
+
def filter_iter(nodes):
|
830 |
+
def iterate(v):
|
831 |
+
for w in nodes(v):
|
832 |
+
if w not in ignore_nodes:
|
833 |
+
yield w
|
834 |
+
|
835 |
+
return iterate
|
836 |
+
|
837 |
+
Gpred = filter_iter(Gpred)
|
838 |
+
Gsucc = filter_iter(Gsucc)
|
839 |
+
|
840 |
+
# support optional edges filter
|
841 |
+
if ignore_edges:
|
842 |
+
if G.is_directed():
|
843 |
+
|
844 |
+
def filter_pred_iter(pred_iter):
|
845 |
+
def iterate(v):
|
846 |
+
for w in pred_iter(v):
|
847 |
+
if (w, v) not in ignore_edges:
|
848 |
+
yield w
|
849 |
+
|
850 |
+
return iterate
|
851 |
+
|
852 |
+
def filter_succ_iter(succ_iter):
|
853 |
+
def iterate(v):
|
854 |
+
for w in succ_iter(v):
|
855 |
+
if (v, w) not in ignore_edges:
|
856 |
+
yield w
|
857 |
+
|
858 |
+
return iterate
|
859 |
+
|
860 |
+
Gpred = filter_pred_iter(Gpred)
|
861 |
+
Gsucc = filter_succ_iter(Gsucc)
|
862 |
+
|
863 |
+
else:
|
864 |
+
|
865 |
+
def filter_iter(nodes):
|
866 |
+
def iterate(v):
|
867 |
+
for w in nodes(v):
|
868 |
+
if (v, w) not in ignore_edges and (w, v) not in ignore_edges:
|
869 |
+
yield w
|
870 |
+
|
871 |
+
return iterate
|
872 |
+
|
873 |
+
Gpred = filter_iter(Gpred)
|
874 |
+
Gsucc = filter_iter(Gsucc)
|
875 |
+
|
876 |
+
push = heappush
|
877 |
+
pop = heappop
|
878 |
+
# Init: Forward Backward
|
879 |
+
dists = [{}, {}] # dictionary of final distances
|
880 |
+
paths = [{source: [source]}, {target: [target]}] # dictionary of paths
|
881 |
+
fringe = [[], []] # heap of (distance, node) tuples for
|
882 |
+
# extracting next node to expand
|
883 |
+
seen = [{source: 0}, {target: 0}] # dictionary of distances to
|
884 |
+
# nodes seen
|
885 |
+
c = count()
|
886 |
+
# initialize fringe heap
|
887 |
+
push(fringe[0], (0, next(c), source))
|
888 |
+
push(fringe[1], (0, next(c), target))
|
889 |
+
# neighs for extracting correct neighbor information
|
890 |
+
neighs = [Gsucc, Gpred]
|
891 |
+
# variables to hold shortest discovered path
|
892 |
+
# finaldist = 1e30000
|
893 |
+
finalpath = []
|
894 |
+
dir = 1
|
895 |
+
while fringe[0] and fringe[1]:
|
896 |
+
# choose direction
|
897 |
+
# dir == 0 is forward direction and dir == 1 is back
|
898 |
+
dir = 1 - dir
|
899 |
+
# extract closest to expand
|
900 |
+
(dist, _, v) = pop(fringe[dir])
|
901 |
+
if v in dists[dir]:
|
902 |
+
# Shortest path to v has already been found
|
903 |
+
continue
|
904 |
+
# update distance
|
905 |
+
dists[dir][v] = dist # equal to seen[dir][v]
|
906 |
+
if v in dists[1 - dir]:
|
907 |
+
# if we have scanned v in both directions we are done
|
908 |
+
# we have now discovered the shortest path
|
909 |
+
return (finaldist, finalpath)
|
910 |
+
|
911 |
+
wt = _weight_function(G, weight)
|
912 |
+
for w in neighs[dir](v):
|
913 |
+
if dir == 0: # forward
|
914 |
+
minweight = wt(v, w, G.get_edge_data(v, w))
|
915 |
+
vwLength = dists[dir][v] + minweight
|
916 |
+
else: # back, must remember to change v,w->w,v
|
917 |
+
minweight = wt(w, v, G.get_edge_data(w, v))
|
918 |
+
vwLength = dists[dir][v] + minweight
|
919 |
+
|
920 |
+
if w in dists[dir]:
|
921 |
+
if vwLength < dists[dir][w]:
|
922 |
+
raise ValueError("Contradictory paths found: negative weights?")
|
923 |
+
elif w not in seen[dir] or vwLength < seen[dir][w]:
|
924 |
+
# relaxing
|
925 |
+
seen[dir][w] = vwLength
|
926 |
+
push(fringe[dir], (vwLength, next(c), w))
|
927 |
+
paths[dir][w] = paths[dir][v] + [w]
|
928 |
+
if w in seen[0] and w in seen[1]:
|
929 |
+
# see if this path is better than the already
|
930 |
+
# discovered shortest path
|
931 |
+
totaldist = seen[0][w] + seen[1][w]
|
932 |
+
if finalpath == [] or finaldist > totaldist:
|
933 |
+
finaldist = totaldist
|
934 |
+
revpath = paths[1][w][:]
|
935 |
+
revpath.reverse()
|
936 |
+
finalpath = paths[0][w] + revpath[1:]
|
937 |
+
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
|
venv/lib/python3.10/site-packages/networkx/algorithms/smallworld.py
ADDED
@@ -0,0 +1,403 @@
1 |
+
"""Functions for estimating the small-world-ness of graphs.
|
2 |
+
|
3 |
+
A small world network is characterized by a small average shortest path length,
|
4 |
+
and a large clustering coefficient.
|
5 |
+
|
6 |
+
Small-worldness is commonly measured with the coefficient sigma or omega.
|
7 |
+
|
8 |
+
Both coefficients compare the average clustering coefficient and shortest path
|
9 |
+
length of a given graph against the same quantities for an equivalent random
|
10 |
+
or lattice graph.
|
11 |
+
|
12 |
+
For more information, see the Wikipedia article on small-world network [1]_.
|
13 |
+
|
14 |
+
.. [1] Small-world network: https://en.wikipedia.org/wiki/Small-world_network
|
15 |
+
|
16 |
+
"""
|
17 |
+
import networkx as nx
|
18 |
+
from networkx.utils import not_implemented_for, py_random_state
|
19 |
+
|
20 |
+
__all__ = ["random_reference", "lattice_reference", "sigma", "omega"]
|
21 |
+
|
22 |
+
|
23 |
+
@not_implemented_for("directed")
|
24 |
+
@not_implemented_for("multigraph")
|
25 |
+
@py_random_state(3)
|
26 |
+
@nx._dispatchable(returns_graph=True)
|
27 |
+
def random_reference(G, niter=1, connectivity=True, seed=None):
|
28 |
+
"""Compute a random graph by swapping edges of a given graph.
|
29 |
+
|
30 |
+
Parameters
|
31 |
+
----------
|
32 |
+
G : graph
|
33 |
+
An undirected graph with 4 or more nodes.
|
34 |
+
|
35 |
+
niter : integer (optional, default=1)
|
36 |
+
An edge is rewired approximately `niter` times.
|
37 |
+
|
38 |
+
connectivity : boolean (optional, default=True)
|
39 |
+
When True, ensure connectivity for the randomized graph.
|
40 |
+
|
41 |
+
seed : integer, random_state, or None (default)
|
42 |
+
Indicator of random number generation state.
|
43 |
+
See :ref:`Randomness<randomness>`.
|
44 |
+
|
45 |
+
Returns
|
46 |
+
-------
|
47 |
+
G : graph
|
48 |
+
The randomized graph.
|
49 |
+
|
50 |
+
Raises
|
51 |
+
------
|
52 |
+
NetworkXError
|
53 |
+
If there are fewer than 4 nodes or 2 edges in `G`
|
54 |
+
|
55 |
+
Notes
|
56 |
+
-----
|
57 |
+
The implementation is adapted from the algorithm by Maslov and Sneppen
|
58 |
+
(2002) [1]_.
|
59 |
+
|
60 |
+
References
|
61 |
+
----------
|
62 |
+
.. [1] Maslov, Sergei, and Kim Sneppen.
|
63 |
+
"Specificity and stability in topology of protein networks."
|
64 |
+
Science 296.5569 (2002): 910-913.
|
65 |
+
"""
|
66 |
+
if len(G) < 4:
|
67 |
+
raise nx.NetworkXError("Graph has fewer than four nodes.")
|
68 |
+
if len(G.edges) < 2:
|
69 |
+
raise nx.NetworkXError("Graph has fewer that 2 edges")
|
70 |
+
|
71 |
+
from networkx.utils import cumulative_distribution, discrete_sequence
|
72 |
+
|
73 |
+
local_conn = nx.connectivity.local_edge_connectivity
|
74 |
+
|
75 |
+
G = G.copy()
|
76 |
+
keys, degrees = zip(*G.degree()) # keys, degree
|
77 |
+
cdf = cumulative_distribution(degrees) # cdf of degree
|
78 |
+
nnodes = len(G)
|
79 |
+
nedges = nx.number_of_edges(G)
|
80 |
+
niter = niter * nedges
|
81 |
+
ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
|
82 |
+
swapcount = 0
|
83 |
+
|
84 |
+
for i in range(niter):
|
85 |
+
n = 0
|
86 |
+
while n < ntries:
|
87 |
+
# pick two random edges without creating edge list
|
88 |
+
# choose source node indices from discrete distribution
|
89 |
+
(ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
|
90 |
+
if ai == ci:
|
91 |
+
continue # same source, skip
|
92 |
+
a = keys[ai] # convert index to label
|
93 |
+
c = keys[ci]
|
94 |
+
# choose target uniformly from neighbors
|
95 |
+
b = seed.choice(list(G.neighbors(a)))
|
96 |
+
d = seed.choice(list(G.neighbors(c)))
|
97 |
+
if b in [a, c, d] or d in [a, b, c]:
|
98 |
+
continue # all vertices should be different
|
99 |
+
|
100 |
+
# don't create parallel edges
|
101 |
+
if (d not in G[a]) and (b not in G[c]):
|
102 |
+
G.add_edge(a, d)
|
103 |
+
G.add_edge(c, b)
|
104 |
+
G.remove_edge(a, b)
|
105 |
+
G.remove_edge(c, d)
|
106 |
+
|
107 |
+
# Check if the graph is still connected
|
108 |
+
if connectivity and local_conn(G, a, b) == 0:
|
109 |
+
# Not connected, revert the swap
|
110 |
+
G.remove_edge(a, d)
|
111 |
+
G.remove_edge(c, b)
|
112 |
+
G.add_edge(a, b)
|
113 |
+
G.add_edge(c, d)
|
114 |
+
else:
|
115 |
+
swapcount += 1
|
116 |
+
break
|
117 |
+
n += 1
|
118 |
+
return G
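# Editor's sketch (not part of the library source): the double-edge swaps above
# preserve every node's degree, so the randomized reference graph keeps the
# degree sequence of the input. A quick, seeded check:
import networkx as nx

G = nx.connected_watts_strogatz_graph(20, 4, 0.1, seed=42)
R = nx.random_reference(G, niter=2, seed=42)
assert sorted(d for _, d in G.degree()) == sorted(d for _, d in R.degree())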
|
119 |
+
|
120 |
+
|
121 |
+
@not_implemented_for("directed")
|
122 |
+
@not_implemented_for("multigraph")
|
123 |
+
@py_random_state(4)
|
124 |
+
@nx._dispatchable(returns_graph=True)
|
125 |
+
def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
|
126 |
+
"""Latticize the given graph by swapping edges.
|
127 |
+
|
128 |
+
Parameters
|
129 |
+
----------
|
130 |
+
G : graph
|
131 |
+
An undirected graph.
|
132 |
+
|
133 |
+
niter : integer (optional, default=5)
|
134 |
+
An edge is rewired approximately niter times.
|
135 |
+
|
136 |
+
D : numpy.array (optional, default=None)
|
137 |
+
Distance to the diagonal matrix.
|
138 |
+
|
139 |
+
connectivity : boolean (optional, default=True)
|
140 |
+
Ensure connectivity for the latticized graph when set to True.
|
141 |
+
|
142 |
+
seed : integer, random_state, or None (default)
|
143 |
+
Indicator of random number generation state.
|
144 |
+
See :ref:`Randomness<randomness>`.
|
145 |
+
|
146 |
+
Returns
|
147 |
+
-------
|
148 |
+
G : graph
|
149 |
+
The latticized graph.
|
150 |
+
|
151 |
+
Raises
|
152 |
+
------
|
153 |
+
NetworkXError
|
154 |
+
If there are fewer than 4 nodes or 2 edges in `G`
|
155 |
+
|
156 |
+
Notes
|
157 |
+
-----
|
158 |
+
The implementation is adapted from the algorithm by Sporns et al. [1]_,
which is inspired by the original work by Maslov and Sneppen (2002) [2]_.
|
160 |
+
|
161 |
+
References
|
162 |
+
----------
|
163 |
+
.. [1] Sporns, Olaf, and Jonathan D. Zwi.
|
164 |
+
"The small world of the cerebral cortex."
|
165 |
+
Neuroinformatics 2.2 (2004): 145-162.
|
166 |
+
.. [2] Maslov, Sergei, and Kim Sneppen.
|
167 |
+
"Specificity and stability in topology of protein networks."
|
168 |
+
Science 296.5569 (2002): 910-913.
|
169 |
+
"""
|
170 |
+
import numpy as np
|
171 |
+
|
172 |
+
from networkx.utils import cumulative_distribution, discrete_sequence
|
173 |
+
|
174 |
+
local_conn = nx.connectivity.local_edge_connectivity
|
175 |
+
|
176 |
+
if len(G) < 4:
|
177 |
+
raise nx.NetworkXError("Graph has fewer than four nodes.")
|
178 |
+
if len(G.edges) < 2:
|
179 |
+
raise nx.NetworkXError("Graph has fewer that 2 edges")
|
180 |
+
# Instead of choosing uniformly at random from a generated edge list,
|
181 |
+
# this algorithm chooses nonuniformly from the set of nodes with
|
182 |
+
# probability weighted by degree.
|
183 |
+
G = G.copy()
|
184 |
+
keys, degrees = zip(*G.degree()) # keys, degree
|
185 |
+
cdf = cumulative_distribution(degrees) # cdf of degree
|
186 |
+
|
187 |
+
nnodes = len(G)
|
188 |
+
nedges = nx.number_of_edges(G)
|
189 |
+
if D is None:
|
190 |
+
D = np.zeros((nnodes, nnodes))
|
191 |
+
un = np.arange(1, nnodes)
|
192 |
+
um = np.arange(nnodes - 1, 0, -1)
|
193 |
+
u = np.append((0,), np.where(un < um, un, um))
|
194 |
+
|
195 |
+
for v in range(int(np.ceil(nnodes / 2))):
|
196 |
+
D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1])
|
197 |
+
D[v, :] = D[nnodes - v - 1, :][::-1]
|
198 |
+
|
199 |
+
niter = niter * nedges
|
200 |
+
# maximal number of rewiring attempts per 'niter'
|
201 |
+
max_attempts = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
|
202 |
+
|
203 |
+
for _ in range(niter):
|
204 |
+
n = 0
|
205 |
+
while n < max_attempts:
|
206 |
+
# pick two random edges without creating edge list
|
207 |
+
# choose source node indices from discrete distribution
|
208 |
+
(ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
|
209 |
+
if ai == ci:
|
210 |
+
continue # same source, skip
|
211 |
+
a = keys[ai] # convert index to label
|
212 |
+
c = keys[ci]
|
213 |
+
# choose target uniformly from neighbors
|
214 |
+
b = seed.choice(list(G.neighbors(a)))
|
215 |
+
d = seed.choice(list(G.neighbors(c)))
|
216 |
+
bi = keys.index(b)
|
217 |
+
di = keys.index(d)
|
218 |
+
|
219 |
+
if b in [a, c, d] or d in [a, b, c]:
|
220 |
+
continue # all vertices should be different
|
221 |
+
|
222 |
+
# don't create parallel edges
|
223 |
+
if (d not in G[a]) and (b not in G[c]):
|
224 |
+
if D[ai, bi] + D[ci, di] >= D[ai, ci] + D[bi, di]:
|
225 |
+
# only swap if we get closer to the diagonal
|
226 |
+
G.add_edge(a, d)
|
227 |
+
G.add_edge(c, b)
|
228 |
+
G.remove_edge(a, b)
|
229 |
+
G.remove_edge(c, d)
|
230 |
+
|
231 |
+
# Check if the graph is still connected
|
232 |
+
if connectivity and local_conn(G, a, b) == 0:
|
233 |
+
# Not connected, revert the swap
|
234 |
+
G.remove_edge(a, d)
|
235 |
+
G.remove_edge(c, b)
|
236 |
+
G.add_edge(a, b)
|
237 |
+
G.add_edge(c, d)
|
238 |
+
else:
|
239 |
+
break
|
240 |
+
n += 1
|
241 |
+
|
242 |
+
return G
|
243 |
+
|
244 |
+
|
245 |
+
@not_implemented_for("directed")
|
246 |
+
@not_implemented_for("multigraph")
|
247 |
+
@py_random_state(3)
|
248 |
+
@nx._dispatchable
|
249 |
+
def sigma(G, niter=100, nrand=10, seed=None):
|
250 |
+
"""Returns the small-world coefficient (sigma) of the given graph.
|
251 |
+
|
252 |
+
The small-world coefficient is defined as:
|
253 |
+
sigma = (C/Cr) / (L/Lr)
|
254 |
+
where C and L are respectively the average clustering coefficient and
|
255 |
+
average shortest path length of G. Cr and Lr are respectively the average
|
256 |
+
clustering coefficient and average shortest path length of an equivalent
|
257 |
+
random graph.
|
258 |
+
|
259 |
+
A graph is commonly classified as small-world if sigma>1.
|
260 |
+
|
261 |
+
Parameters
|
262 |
+
----------
|
263 |
+
G : NetworkX graph
|
264 |
+
An undirected graph.
|
265 |
+
niter : integer (optional, default=100)
|
266 |
+
Approximate number of rewiring per edge to compute the equivalent
|
267 |
+
random graph.
|
268 |
+
nrand : integer (optional, default=10)
|
269 |
+
Number of random graphs generated to compute the average clustering
|
270 |
+
coefficient (Cr) and average shortest path length (Lr).
|
271 |
+
seed : integer, random_state, or None (default)
|
272 |
+
Indicator of random number generation state.
|
273 |
+
See :ref:`Randomness<randomness>`.
|
274 |
+
|
275 |
+
Returns
|
276 |
+
-------
|
277 |
+
sigma : float
|
278 |
+
The small-world coefficient of G.
|
279 |
+
|
280 |
+
Notes
|
281 |
+
-----
|
282 |
+
The implementation is adapted from Humphries et al. [1]_ [2]_.
|
283 |
+
|
284 |
+
References
|
285 |
+
----------
|
286 |
+
.. [1] The brainstem reticular formation is a small-world, not scale-free,
|
287 |
+
network M. D. Humphries, K. Gurney and T. J. Prescott,
|
288 |
+
Proc. Roy. Soc. B 2006 273, 503-511, doi:10.1098/rspb.2005.3354.
|
289 |
+
.. [2] Humphries and Gurney (2008).
|
290 |
+
"Network 'Small-World-Ness': A Quantitative Method for Determining
|
291 |
+
Canonical Network Equivalence".
|
292 |
+
PLoS One. 3 (4). PMID 18446219. doi:10.1371/journal.pone.0002051.
|
293 |
+
"""
|
294 |
+
import numpy as np
|
295 |
+
|
296 |
+
# Compute the mean clustering coefficient and average shortest path length
|
297 |
+
# for an equivalent random graph
|
298 |
+
randMetrics = {"C": [], "L": []}
|
299 |
+
for i in range(nrand):
|
300 |
+
Gr = random_reference(G, niter=niter, seed=seed)
|
301 |
+
randMetrics["C"].append(nx.transitivity(Gr))
|
302 |
+
randMetrics["L"].append(nx.average_shortest_path_length(Gr))
|
303 |
+
|
304 |
+
C = nx.transitivity(G)
|
305 |
+
L = nx.average_shortest_path_length(G)
|
306 |
+
Cr = np.mean(randMetrics["C"])
|
307 |
+
Lr = np.mean(randMetrics["L"])
|
308 |
+
|
309 |
+
sigma = (C / Cr) / (L / Lr)
|
310 |
+
|
311 |
+
return float(sigma)
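# Editor's usage sketch (not part of the library source): sigma > 1 is commonly
# read as "small-world". The computation is stochastic and can be slow, so the
# small niter/nrand values here are purely for illustration.
import networkx as nx

G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=7)
s = nx.sigma(G, niter=5, nrand=3, seed=7)
print(f"sigma = {s:.2f}")  # typically greater than 1 for a Watts-Strogatz graph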
|
312 |
+
|
313 |
+
|
314 |
+
@not_implemented_for("directed")
|
315 |
+
@not_implemented_for("multigraph")
|
316 |
+
@py_random_state(3)
|
317 |
+
@nx._dispatchable
|
318 |
+
def omega(G, niter=5, nrand=10, seed=None):
|
319 |
+
"""Returns the small-world coefficient (omega) of a graph
|
320 |
+
|
321 |
+
The small-world coefficient of a graph G is:
|
322 |
+
|
323 |
+
omega = Lr/L - C/Cl
|
324 |
+
|
325 |
+
where C and L are respectively the average clustering coefficient and
|
326 |
+
average shortest path length of G. Lr is the average shortest path length
|
327 |
+
of an equivalent random graph and Cl is the average clustering coefficient
|
328 |
+
of an equivalent lattice graph.
|
329 |
+
|
330 |
+
The small-world coefficient (omega) measures how much G is like a lattice
|
331 |
+
or a random graph. Negative values mean G is similar to a lattice whereas
|
332 |
+
positive values mean G is a random graph.
|
333 |
+
Values close to 0 mean that G has small-world characteristics.
|
334 |
+
|
335 |
+
Parameters
|
336 |
+
----------
|
337 |
+
G : NetworkX graph
|
338 |
+
An undirected graph.
|
339 |
+
|
340 |
+
niter: integer (optional, default=5)
|
341 |
+
Approximate number of rewiring per edge to compute the equivalent
|
342 |
+
random graph.
|
343 |
+
|
344 |
+
nrand: integer (optional, default=10)
|
345 |
+
Number of random and lattice reference graphs generated to compute the
average shortest path length (Lr) and the maximal clustering coefficient (Cl).
|
347 |
+
|
348 |
+
seed : integer, random_state, or None (default)
|
349 |
+
Indicator of random number generation state.
|
350 |
+
See :ref:`Randomness<randomness>`.
|
351 |
+
|
352 |
+
|
353 |
+
Returns
|
354 |
+
-------
|
355 |
+
omega : float
|
356 |
+
The small-world coefficient (omega)
|
357 |
+
|
358 |
+
Notes
|
359 |
+
-----
|
360 |
+
The implementation is adapted from the algorithm by Telesford et al. [1]_.
|
361 |
+
|
362 |
+
References
|
363 |
+
----------
|
364 |
+
.. [1] Telesford, Joyce, Hayasaka, Burdette, and Laurienti (2011).
|
365 |
+
"The Ubiquity of Small-World Networks".
|
366 |
+
Brain Connectivity. 1 (0038): 367-75. PMC 3604768. PMID 22432451.
|
367 |
+
doi:10.1089/brain.2011.0038.
|
368 |
+
"""
|
369 |
+
import numpy as np
|
370 |
+
|
371 |
+
# Compute the mean clustering coefficient and average shortest path length
|
372 |
+
# for an equivalent random graph
|
373 |
+
randMetrics = {"C": [], "L": []}
|
374 |
+
|
375 |
+
# Calculate initial average clustering coefficient which potentially will
|
376 |
+
# get replaced by higher clustering coefficients from generated lattice
|
377 |
+
# reference graphs
|
378 |
+
Cl = nx.average_clustering(G)
|
379 |
+
|
380 |
+
niter_lattice_reference = niter
|
381 |
+
niter_random_reference = niter * 2
|
382 |
+
|
383 |
+
for _ in range(nrand):
|
384 |
+
# Generate random graph
|
385 |
+
Gr = random_reference(G, niter=niter_random_reference, seed=seed)
|
386 |
+
randMetrics["L"].append(nx.average_shortest_path_length(Gr))
|
387 |
+
|
388 |
+
# Generate lattice graph
|
389 |
+
Gl = lattice_reference(G, niter=niter_lattice_reference, seed=seed)
|
390 |
+
|
391 |
+
# Replace old clustering coefficient, if clustering is higher in
|
392 |
+
# generated lattice reference
|
393 |
+
Cl_temp = nx.average_clustering(Gl)
|
394 |
+
if Cl_temp > Cl:
|
395 |
+
Cl = Cl_temp
|
396 |
+
|
397 |
+
C = nx.average_clustering(G)
|
398 |
+
L = nx.average_shortest_path_length(G)
|
399 |
+
Lr = np.mean(randMetrics["L"])
|
400 |
+
|
401 |
+
omega = (Lr / L) - (C / Cl)
|
402 |
+
|
403 |
+
return float(omega)
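# Editor's usage sketch (not part of the library source): omega close to 0
# suggests small-world structure, negative values a more lattice-like graph,
# positive values a more random-like graph. Small niter/nrand keep this cheap.
import networkx as nx

G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=7)
w = nx.omega(G, niter=2, nrand=3, seed=7)
print(f"omega = {w:.2f}")  # expected to lie near 0 for this graph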
|
venv/lib/python3.10/site-packages/networkx/algorithms/sparsifiers.py
ADDED
@@ -0,0 +1,295 @@
1 |
+
"""Functions for computing sparsifiers of graphs."""
|
2 |
+
import math
|
3 |
+
|
4 |
+
import networkx as nx
|
5 |
+
from networkx.utils import not_implemented_for, py_random_state
|
6 |
+
|
7 |
+
__all__ = ["spanner"]
|
8 |
+
|
9 |
+
|
10 |
+
@not_implemented_for("directed")
|
11 |
+
@not_implemented_for("multigraph")
|
12 |
+
@py_random_state(3)
|
13 |
+
@nx._dispatchable(edge_attrs="weight", returns_graph=True)
|
14 |
+
def spanner(G, stretch, weight=None, seed=None):
|
15 |
+
"""Returns a spanner of the given graph with the given stretch.
|
16 |
+
|
17 |
+
A spanner of a graph G = (V, E) with stretch t is a subgraph
|
18 |
+
H = (V, E_S) such that E_S is a subset of E and the distance between
|
19 |
+
any pair of nodes in H is at most t times the distance between the
|
20 |
+
nodes in G.
|
21 |
+
|
22 |
+
Parameters
|
23 |
+
----------
|
24 |
+
G : NetworkX graph
|
25 |
+
An undirected simple graph.
|
26 |
+
|
27 |
+
stretch : float
|
28 |
+
The stretch of the spanner.
|
29 |
+
|
30 |
+
weight : object
|
31 |
+
The edge attribute to use as distance.
|
32 |
+
|
33 |
+
seed : integer, random_state, or None (default)
|
34 |
+
Indicator of random number generation state.
|
35 |
+
See :ref:`Randomness<randomness>`.
|
36 |
+
|
37 |
+
Returns
|
38 |
+
-------
|
39 |
+
NetworkX graph
|
40 |
+
A spanner of the given graph with the given stretch.
|
41 |
+
|
42 |
+
Raises
|
43 |
+
------
|
44 |
+
ValueError
|
45 |
+
If a stretch less than 1 is given.
|
46 |
+
|
47 |
+
Notes
|
48 |
+
-----
|
49 |
+
This function implements the spanner algorithm by Baswana and Sen,
|
50 |
+
see [1].
|
51 |
+
|
52 |
+
This algorithm is a randomized Las Vegas algorithm: the expected
|
53 |
+
running time is O(km) where k = (stretch + 1) // 2 and m is the
|
54 |
+
number of edges in G. The returned graph is always a spanner of the
|
55 |
+
given graph with the specified stretch. For weighted graphs the
|
56 |
+
number of edges in the spanner is O(k * n^(1 + 1 / k)) where k is
|
57 |
+
defined as above and n is the number of nodes in G. For unweighted
|
58 |
+
graphs the number of edges is O(n^(1 + 1 / k) + kn).
|
59 |
+
|
60 |
+
References
|
61 |
+
----------
|
62 |
+
[1] S. Baswana, S. Sen. A Simple and Linear Time Randomized
|
63 |
+
Algorithm for Computing Sparse Spanners in Weighted Graphs.
|
64 |
+
Random Struct. Algorithms 30(4): 532-563 (2007).
|
65 |
+
"""
|
66 |
+
if stretch < 1:
|
67 |
+
raise ValueError("stretch must be at least 1")
|
68 |
+
|
69 |
+
k = (stretch + 1) // 2
|
70 |
+
|
71 |
+
# initialize spanner H with empty edge set
|
72 |
+
H = nx.empty_graph()
|
73 |
+
H.add_nodes_from(G.nodes)
|
74 |
+
|
75 |
+
# phase 1: forming the clusters
|
76 |
+
# the residual graph has V' from the paper as its node set
|
77 |
+
# and E' from the paper as its edge set
|
78 |
+
residual_graph = _setup_residual_graph(G, weight)
|
79 |
+
# clustering is a dictionary that maps nodes in a cluster to the
|
80 |
+
# cluster center
|
81 |
+
clustering = {v: v for v in G.nodes}
|
82 |
+
sample_prob = math.pow(G.number_of_nodes(), -1 / k)
|
83 |
+
size_limit = 2 * math.pow(G.number_of_nodes(), 1 + 1 / k)
|
84 |
+
|
85 |
+
i = 0
|
86 |
+
while i < k - 1:
|
87 |
+
# step 1: sample centers
|
88 |
+
sampled_centers = set()
|
89 |
+
for center in set(clustering.values()):
|
90 |
+
if seed.random() < sample_prob:
|
91 |
+
sampled_centers.add(center)
|
92 |
+
|
93 |
+
# combined loop for steps 2 and 3
|
94 |
+
edges_to_add = set()
|
95 |
+
edges_to_remove = set()
|
96 |
+
new_clustering = {}
|
97 |
+
for v in residual_graph.nodes:
|
98 |
+
if clustering[v] in sampled_centers:
|
99 |
+
continue
|
100 |
+
|
101 |
+
# step 2: find neighboring (sampled) clusters and
|
102 |
+
# lightest edges to them
|
103 |
+
lightest_edge_neighbor, lightest_edge_weight = _lightest_edge_dicts(
|
104 |
+
residual_graph, clustering, v
|
105 |
+
)
|
106 |
+
neighboring_sampled_centers = (
|
107 |
+
set(lightest_edge_weight.keys()) & sampled_centers
|
108 |
+
)
|
109 |
+
|
110 |
+
# step 3: add edges to spanner
|
111 |
+
if not neighboring_sampled_centers:
|
112 |
+
# connect to each neighboring center via lightest edge
|
113 |
+
for neighbor in lightest_edge_neighbor.values():
|
114 |
+
edges_to_add.add((v, neighbor))
|
115 |
+
# remove all incident edges
|
116 |
+
for neighbor in residual_graph.adj[v]:
|
117 |
+
edges_to_remove.add((v, neighbor))
|
118 |
+
|
119 |
+
else: # there is a neighboring sampled center
|
120 |
+
closest_center = min(
|
121 |
+
neighboring_sampled_centers, key=lightest_edge_weight.get
|
122 |
+
)
|
123 |
+
closest_center_weight = lightest_edge_weight[closest_center]
|
124 |
+
closest_center_neighbor = lightest_edge_neighbor[closest_center]
|
125 |
+
|
126 |
+
edges_to_add.add((v, closest_center_neighbor))
|
127 |
+
new_clustering[v] = closest_center
|
128 |
+
|
129 |
+
# connect to centers with edge weight less than
|
130 |
+
# closest_center_weight
|
131 |
+
for center, edge_weight in lightest_edge_weight.items():
|
132 |
+
if edge_weight < closest_center_weight:
|
133 |
+
neighbor = lightest_edge_neighbor[center]
|
134 |
+
edges_to_add.add((v, neighbor))
|
135 |
+
|
136 |
+
# remove edges to centers with edge weight less than
|
137 |
+
# closest_center_weight
|
138 |
+
for neighbor in residual_graph.adj[v]:
|
139 |
+
nbr_cluster = clustering[neighbor]
|
140 |
+
nbr_weight = lightest_edge_weight[nbr_cluster]
|
141 |
+
if (
|
142 |
+
nbr_cluster == closest_center
|
143 |
+
or nbr_weight < closest_center_weight
|
144 |
+
):
|
145 |
+
edges_to_remove.add((v, neighbor))
|
146 |
+
|
147 |
+
# check whether iteration added too many edges to spanner,
|
148 |
+
# if so repeat
|
149 |
+
if len(edges_to_add) > size_limit:
|
150 |
+
# an iteration is repeated O(1) times on expectation
|
151 |
+
continue
|
152 |
+
|
153 |
+
# iteration succeeded
|
154 |
+
i = i + 1
|
155 |
+
|
156 |
+
# actually add edges to spanner
|
157 |
+
for u, v in edges_to_add:
|
158 |
+
_add_edge_to_spanner(H, residual_graph, u, v, weight)
|
159 |
+
|
160 |
+
# actually delete edges from residual graph
|
161 |
+
residual_graph.remove_edges_from(edges_to_remove)
|
162 |
+
|
163 |
+
# copy old clustering data to new_clustering
|
164 |
+
for node, center in clustering.items():
|
165 |
+
if center in sampled_centers:
|
166 |
+
new_clustering[node] = center
|
167 |
+
clustering = new_clustering
|
168 |
+
|
169 |
+
# step 4: remove intra-cluster edges
|
170 |
+
for u in residual_graph.nodes:
|
171 |
+
for v in list(residual_graph.adj[u]):
|
172 |
+
if clustering[u] == clustering[v]:
|
173 |
+
residual_graph.remove_edge(u, v)
|
174 |
+
|
175 |
+
# update residual graph node set
|
176 |
+
for v in list(residual_graph.nodes):
|
177 |
+
if v not in clustering:
|
178 |
+
residual_graph.remove_node(v)
|
179 |
+
|
180 |
+
# phase 2: vertex-cluster joining
|
181 |
+
for v in residual_graph.nodes:
|
182 |
+
lightest_edge_neighbor, _ = _lightest_edge_dicts(residual_graph, clustering, v)
|
183 |
+
for neighbor in lightest_edge_neighbor.values():
|
184 |
+
_add_edge_to_spanner(H, residual_graph, v, neighbor, weight)
|
185 |
+
|
186 |
+
return H
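# Editor's usage sketch (not part of the library source): a 3-spanner keeps all
# nodes but only a subset of the edges, while stretching shortest-path distances
# by at most a factor of 3.
import networkx as nx

G = nx.gnp_random_graph(50, 0.3, seed=1)
H = nx.spanner(G, stretch=3, seed=1)
print(G.number_of_edges(), ">=", H.number_of_edges())
for u, v in G.edges():
    # endpoints of a G-edge are at distance 1 in G, so at most 3 apart in H
    assert nx.shortest_path_length(H, u, v) <= 3 * nx.shortest_path_length(G, u, v)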
|
187 |
+
|
188 |
+
|
189 |
+
def _setup_residual_graph(G, weight):
|
190 |
+
"""Setup residual graph as a copy of G with unique edges weights.
|
191 |
+
|
192 |
+
The node set of the residual graph corresponds to the set V' from
|
193 |
+
the Baswana-Sen paper and the edge set corresponds to the set E'
|
194 |
+
from the paper.
|
195 |
+
|
196 |
+
This function associates distinct weights to the edges of the
|
197 |
+
residual graph (even for unweighted input graphs), as required by
|
198 |
+
the algorithm.
|
199 |
+
|
200 |
+
Parameters
|
201 |
+
----------
|
202 |
+
G : NetworkX graph
|
203 |
+
An undirected simple graph.
|
204 |
+
|
205 |
+
weight : object
|
206 |
+
The edge attribute to use as distance.
|
207 |
+
|
208 |
+
Returns
|
209 |
+
-------
|
210 |
+
NetworkX graph
|
211 |
+
The residual graph used for the Baswana-Sen algorithm.
|
212 |
+
"""
|
213 |
+
residual_graph = G.copy()
|
214 |
+
|
215 |
+
# establish unique edge weights, even for unweighted graphs
|
216 |
+
for u, v in G.edges():
|
217 |
+
if not weight:
|
218 |
+
residual_graph[u][v]["weight"] = (id(u), id(v))
|
219 |
+
else:
|
220 |
+
residual_graph[u][v]["weight"] = (G[u][v][weight], id(u), id(v))
|
221 |
+
|
222 |
+
return residual_graph
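# Editor's sketch with hypothetical values (not part of the library source): the
# tuples stored above act as tie-free weights because Python compares tuples
# lexicographically, so two distinct edges never compare equal even when their
# numeric weights do.
w1 = (2.5, 11, 12)   # hypothetical (weight, id(u), id(v)) triple for edge (u, v)
w2 = (2.5, 21, 22)   # a different edge that happens to have the same weight
print(w1 == w2)      # False: the id components make every edge weight unique
print(w1 < w2)       # True: the numeric weight is compared first, ids break ties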
|
223 |
+
|
224 |
+
|
225 |
+
def _lightest_edge_dicts(residual_graph, clustering, node):
|
226 |
+
"""Find the lightest edge to each cluster.
|
227 |
+
|
228 |
+
Searches for the minimum-weight edge to each cluster adjacent to
|
229 |
+
the given node.
|
230 |
+
|
231 |
+
Parameters
|
232 |
+
----------
|
233 |
+
residual_graph : NetworkX graph
|
234 |
+
The residual graph used by the Baswana-Sen algorithm.
|
235 |
+
|
236 |
+
clustering : dictionary
|
237 |
+
The current clustering of the nodes.
|
238 |
+
|
239 |
+
node : node
|
240 |
+
The node from which the search originates.
|
241 |
+
|
242 |
+
Returns
|
243 |
+
-------
|
244 |
+
lightest_edge_neighbor, lightest_edge_weight : dictionary, dictionary
|
245 |
+
lightest_edge_neighbor is a dictionary that maps a center C to
|
246 |
+
a node v in the corresponding cluster such that the edge from
|
247 |
+
the given node to v is the lightest edge from the given node to
|
248 |
+
any node in cluster. lightest_edge_weight maps a center C to the
|
249 |
+
weight of the aforementioned edge.
|
250 |
+
|
251 |
+
Notes
|
252 |
+
-----
|
253 |
+
If a cluster has no node that is adjacent to the given node in the
|
254 |
+
residual graph then the center of the cluster is not a key in the
|
255 |
+
returned dictionaries.
|
256 |
+
"""
|
257 |
+
lightest_edge_neighbor = {}
|
258 |
+
lightest_edge_weight = {}
|
259 |
+
for neighbor in residual_graph.adj[node]:
|
260 |
+
nbr_center = clustering[neighbor]
|
261 |
+
weight = residual_graph[node][neighbor]["weight"]
|
262 |
+
if (
|
263 |
+
nbr_center not in lightest_edge_weight
|
264 |
+
or weight < lightest_edge_weight[nbr_center]
|
265 |
+
):
|
266 |
+
lightest_edge_neighbor[nbr_center] = neighbor
|
267 |
+
lightest_edge_weight[nbr_center] = weight
|
268 |
+
return lightest_edge_neighbor, lightest_edge_weight
|
269 |
+
|
270 |
+
|
271 |
+
def _add_edge_to_spanner(H, residual_graph, u, v, weight):
|
272 |
+
"""Add the edge {u, v} to the spanner H and take weight from
|
273 |
+
the residual graph.
|
274 |
+
|
275 |
+
Parameters
|
276 |
+
----------
|
277 |
+
H : NetworkX graph
|
278 |
+
The spanner under construction.
|
279 |
+
|
280 |
+
residual_graph : NetworkX graph
|
281 |
+
The residual graph used by the Baswana-Sen algorithm. The weight
|
282 |
+
for the edge is taken from this graph.
|
283 |
+
|
284 |
+
u : node
|
285 |
+
One endpoint of the edge.
|
286 |
+
|
287 |
+
v : node
|
288 |
+
The other endpoint of the edge.
|
289 |
+
|
290 |
+
weight : object
|
291 |
+
The edge attribute to use as distance.
|
292 |
+
"""
|
293 |
+
H.add_edge(u, v)
|
294 |
+
if weight:
|
295 |
+
H[u][v][weight] = residual_graph[u][v]["weight"][0]
|
venv/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py
ADDED
@@ -0,0 +1,283 @@
|
"""Functions for computing measures of structural holes."""

import networkx as nx

__all__ = ["constraint", "local_constraint", "effective_size"]


@nx._dispatchable(edge_attrs="weight")
def mutual_weight(G, u, v, weight=None):
    """Returns the sum of the weights of the edge from `u` to `v` and
    the edge from `v` to `u` in `G`.

    `weight` is the edge data key that represents the edge weight. If
    the specified key is `None` or is not in the edge data for an edge,
    that edge is assumed to have weight 1.

    Pre-conditions: `u` and `v` must both be in `G`.

    """
    try:
        a_uv = G[u][v].get(weight, 1)
    except KeyError:
        a_uv = 0
    try:
        a_vu = G[v][u].get(weight, 1)
    except KeyError:
        a_vu = 0
    return a_uv + a_vu


@nx._dispatchable(edge_attrs="weight")
def normalized_mutual_weight(G, u, v, norm=sum, weight=None):
    """Returns normalized mutual weight of the edges from `u` to `v`
    with respect to the mutual weights of the neighbors of `u` in `G`.

    `norm` specifies how the normalization factor is computed. It must
    be a function that takes a single argument and returns a number.
    The argument will be an iterable of mutual weights
    of pairs ``(u, w)``, where ``w`` ranges over each (in- and
    out-)neighbor of ``u``. Common values for `norm` are
    ``sum`` and ``max``.

    `weight` can be ``None`` or a string; if None, all edge weights
    are considered equal. Otherwise it holds the name of the edge
    attribute used as weight.

    """
    scale = norm(mutual_weight(G, u, w, weight) for w in set(nx.all_neighbors(G, u)))
    return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale


@nx._dispatchable(edge_attrs="weight")
def effective_size(G, nodes=None, weight=None):
    r"""Returns the effective size of all nodes in the graph ``G``.

    The *effective size* of a node's ego network is based on the concept
    of redundancy. A person's ego network has redundancy to the extent
    that her contacts are connected to each other as well. The
    nonredundant part of a person's relationships is the effective
    size of her ego network [1]_. Formally, the effective size of a
    node $u$, denoted $e(u)$, is defined by

    .. math::

       e(u) = \sum_{v \in N(u) \setminus \{u\}}
       \left(1 - \sum_{w \in N(v)} p_{uw} m_{vw}\right)

    where $N(u)$ is the set of neighbors of $u$ and $p_{uw}$ is the
    normalized mutual weight of the (directed or undirected) edges
    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. And $m_{vw}$
    is the mutual weight of $v$ and $w$ divided by $v$ highest mutual
    weight with any of its neighbors. The *mutual weight* of $u$ and $v$
    is the sum of the weights of edges joining them (edge weights are
    assumed to be one if the graph is unweighted).

    For the case of unweighted and undirected graphs, Borgatti proposed
    a simplified formula to compute effective size [2]_

    .. math::

       e(u) = n - \frac{2t}{n}

    where `t` is the number of ties in the ego network (not including
    ties to ego) and `n` is the number of nodes (excluding ego).

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``v``. Directed graphs are treated like
        undirected graphs when computing neighbors of ``v``.

    nodes : container, optional
        Container of nodes in the graph ``G`` to compute the effective size.
        If None, the effective size of every node is computed.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    dict
        Dictionary with nodes as keys and the effective size of the node as values.

    Notes
    -----
    Burt also defined the related concept of *efficiency* of a node's ego
    network, which is its effective size divided by the degree of that
    node [1]_. So you can easily compute efficiency:

    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)])
    >>> esize = nx.effective_size(G)
    >>> efficiency = {n: v / G.degree(n) for n, v in esize.items()}

    See also
    --------
    constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           *Structural Holes: The Social Structure of Competition.*
           Cambridge: Harvard University Press, 1995.

    .. [2] Borgatti, S.
           "Structural Holes: Unpacking Burt's Redundancy Measures"
           CONNECTIONS 20(1):35-38.
           http://www.analytictech.com/connections/v20(1)/holes.htm

    """

    def redundancy(G, u, v, weight=None):
        nmw = normalized_mutual_weight
        r = sum(
            nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight)
            for w in set(nx.all_neighbors(G, u))
        )
        return 1 - r

    effective_size = {}
    if nodes is None:
        nodes = G
    # Use Borgatti's simplified formula for unweighted and undirected graphs
    if not G.is_directed() and weight is None:
        for v in nodes:
            # Effective size is not defined for isolated nodes
            if len(G[v]) == 0:
                effective_size[v] = float("nan")
                continue
            E = nx.ego_graph(G, v, center=False, undirected=True)
            effective_size[v] = len(E) - (2 * E.size()) / len(E)
    else:
        for v in nodes:
            # Effective size is not defined for isolated nodes
            if len(G[v]) == 0:
                effective_size[v] = float("nan")
                continue
            effective_size[v] = sum(
                redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v))
            )
    return effective_size


@nx._dispatchable(edge_attrs="weight")
def constraint(G, nodes=None, weight=None):
    r"""Returns the constraint on all nodes in the graph ``G``.

    The *constraint* is a measure of the extent to which a node *v* is
    invested in those nodes that are themselves invested in the
    neighbors of *v*. Formally, the *constraint on v*, denoted `c(v)`,
    is defined by

    .. math::

       c(v) = \sum_{w \in N(v) \setminus \{v\}} \ell(v, w)

    where $N(v)$ is the subset of the neighbors of `v` that are either
    predecessors or successors of `v` and $\ell(v, w)$ is the local
    constraint on `v` with respect to `w` [1]_. For the definition of local
    constraint, see :func:`local_constraint`.

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``v``. This can be either directed or undirected.

    nodes : container, optional
        Container of nodes in the graph ``G`` to compute the constraint. If
        None, the constraint of every node is computed.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    dict
        Dictionary with nodes as keys and the constraint on the node as values.

    See also
    --------
    local_constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           "Structural holes and good ideas".
           American Journal of Sociology (110): 349–399.

    """
    if nodes is None:
        nodes = G
    constraint = {}
    for v in nodes:
        # Constraint is not defined for isolated nodes
        if len(G[v]) == 0:
            constraint[v] = float("nan")
            continue
        constraint[v] = sum(
            local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v))
        )
    return constraint


@nx._dispatchable(edge_attrs="weight")
def local_constraint(G, u, v, weight=None):
    r"""Returns the local constraint on the node ``u`` with respect to
    the node ``v`` in the graph ``G``.

    Formally, the *local constraint on u with respect to v*, denoted
    $\ell(u, v)$, is defined by

    .. math::

       \ell(u, v) = \left(p_{uv} + \sum_{w \in N(v)} p_{uw} p_{wv}\right)^2,

    where $N(v)$ is the set of neighbors of $v$ and $p_{uv}$ is the
    normalized mutual weight of the (directed or undirected) edges
    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. The *mutual
    weight* of $u$ and $v$ is the sum of the weights of edges joining
    them (edge weights are assumed to be one if the graph is
    unweighted).

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``u`` and ``v``. This can be either
        directed or undirected.

    u : node
        A node in the graph ``G``.

    v : node
        A node in the graph ``G``.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    float
        The constraint of the node ``v`` in the graph ``G``.

    See also
    --------
    constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           "Structural holes and good ideas".
           American Journal of Sociology (110): 349–399.

    """
    nmw = normalized_mutual_weight
    direct = nmw(G, u, v, weight=weight)
    indirect = sum(
        nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight)
        for w in set(nx.all_neighbors(G, u))
    )
    return (direct + indirect) ** 2
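As a quick orientation to how the three public functions above fit together, here is a short, illustrative sketch; the choice of the karate-club graph and of printing the five lowest-constraint nodes is an arbitrary example, not something prescribed by this module.

import networkx as nx

G = nx.karate_club_graph()

c = nx.constraint(G)        # Burt's constraint, per node
e = nx.effective_size(G)    # effective size of each node's ego network

# Nodes with low constraint and large effective size tend to bridge
# structural holes between otherwise disconnected contacts.
lowest_constraint = sorted(G, key=lambda n: c[n])[:5]
print(lowest_constraint)

# local_constraint is the per-neighbor term that constraint() sums over.
print(nx.local_constraint(G, 0, 33))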
venv/lib/python3.10/site-packages/networkx/algorithms/summarization.py
ADDED
@@ -0,0 +1,563 @@
"""
Graph summarization finds smaller representations of graphs resulting in faster
runtime of algorithms, reduced storage needs, and noise reduction.
Summarization has applications in areas such as visualization, pattern mining,
clustering and community detection, and more. Core graph summarization
techniques are grouping/aggregation, bit-compression,
simplification/sparsification, and influence based. Graph summarization
algorithms often produce either summary graphs in the form of supergraphs or
sparsified graphs, or a list of independent structures. Supergraphs are the
most common product, which consist of supernodes and original nodes and are
connected by edges and superedges, which represent aggregate edges between
nodes and supernodes.

Grouping/aggregation based techniques compress graphs by representing
close/connected nodes and edges in a graph by a single node/edge in a
supergraph. Nodes can be grouped together into supernodes based on their
structural similarities or proximity within a graph to reduce the total number
of nodes in a graph. Edge-grouping techniques group edges into lossy/lossless
nodes called compressor or virtual nodes to reduce the total number of edges in
a graph. Edge-grouping techniques can be lossless, meaning that they can be
used to re-create the original graph, or techniques can be lossy, requiring
less space to store the summary graph, but at the expense of lower
reconstruction accuracy of the original graph.

Bit-compression techniques minimize the amount of information needed to
describe the original graph, while revealing structural patterns in the
original graph. The two-part minimum description length (MDL) is often used to
represent the model and the original graph in terms of the model. A key
difference between graph compression and graph summarization is that graph
summarization focuses on finding structural patterns within the original graph,
whereas graph compression focuses on compressing the original graph to be as
small as possible. **NOTE**: Some bit-compression methods exist solely to
compress a graph without creating a summary graph or finding comprehensible
structural patterns.

Simplification/Sparsification techniques attempt to create a sparse
representation of a graph by removing unimportant nodes and edges from the
graph. Sparsified graphs differ from supergraphs created by
grouping/aggregation by only containing a subset of the original nodes and
edges of the original graph.

Influence based techniques aim to find a high-level description of influence
propagation in a large graph. These methods are scarce and have been mostly
applied to social graphs.

*dedensification* is a grouping/aggregation based technique to compress the
neighborhoods around high-degree nodes in unweighted graphs by adding
compressor nodes that summarize multiple edges of the same type to
high-degree nodes (nodes with a degree greater than a given threshold).
Dedensification was developed for the purpose of increasing performance of
query processing around high-degree nodes in graph databases and enables direct
operations on the compressed graph. The structural patterns surrounding
high-degree nodes in the original graph are preserved while using fewer edges and
adding a small number of compressor nodes. The degree of nodes present in the
original graph is also preserved. The current implementation of dedensification
supports graphs with one edge type.

For more information on graph summarization, see `Graph Summarization Methods
and Applications: A Survey <https://dl.acm.org/doi/abs/10.1145/3186727>`_
"""
from collections import Counter, defaultdict

import networkx as nx

__all__ = ["dedensify", "snap_aggregation"]


@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True)
def dedensify(G, threshold, prefix=None, copy=True):
    """Compresses neighborhoods around high-degree nodes

    Reduces the number of edges to high-degree nodes by adding compressor nodes
    that summarize multiple edges of the same type to high-degree nodes (nodes
    with a degree greater than a given threshold). Dedensification also has
    the added benefit of reducing the number of edges around high-degree nodes.
    The implementation currently supports graphs with a single edge type.

    Parameters
    ----------
    G: graph
       A networkx graph
    threshold: int
       Minimum degree threshold of a node to be considered a high degree node.
       The threshold must be greater than or equal to 2.
    prefix: str or None, optional (default: None)
       An optional prefix for denoting compressor nodes
    copy: bool, optional (default: True)
       Indicates if dedensification should be done inplace

    Returns
    -------
    dedensified networkx graph : (graph, set)
        2-tuple of the dedensified graph and set of compressor nodes

    Notes
    -----
    According to the algorithm in [1]_, removes edges in a graph by
    compressing/decompressing the neighborhoods around high degree nodes by
    adding compressor nodes that summarize multiple edges of the same type
    to high-degree nodes. Dedensification will only add a compressor node when
    doing so will reduce the total number of edges in the given graph. This
    implementation currently supports graphs with a single edge type.

    Examples
    --------
    Dedensification will only add compressor nodes when doing so would result
    in fewer edges::

        >>> original_graph = nx.DiGraph()
        >>> original_graph.add_nodes_from(
        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
        ... )
        >>> original_graph.add_edges_from(
        ...     [
        ...         ("1", "C"), ("1", "B"),
        ...         ("2", "C"), ("2", "B"), ("2", "A"),
        ...         ("3", "B"), ("3", "A"), ("3", "6"),
        ...         ("4", "C"), ("4", "B"), ("4", "A"),
        ...         ("5", "B"), ("5", "A"),
        ...         ("6", "5"),
        ...         ("A", "6")
        ...     ]
        ... )
        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
        >>> original_graph.number_of_edges()
        15
        >>> c_graph.number_of_edges()
        14

    A dedensified, directed graph can be "densified" to reconstruct the
    original graph::

        >>> original_graph = nx.DiGraph()
        >>> original_graph.add_nodes_from(
        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
        ... )
        >>> original_graph.add_edges_from(
        ...     [
        ...         ("1", "C"), ("1", "B"),
        ...         ("2", "C"), ("2", "B"), ("2", "A"),
        ...         ("3", "B"), ("3", "A"), ("3", "6"),
        ...         ("4", "C"), ("4", "B"), ("4", "A"),
        ...         ("5", "B"), ("5", "A"),
        ...         ("6", "5"),
        ...         ("A", "6")
        ...     ]
        ... )
        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
        >>> # re-densifies the compressed graph into the original graph
        >>> for c_node in c_nodes:
        ...     all_neighbors = set(nx.all_neighbors(c_graph, c_node))
        ...     out_neighbors = set(c_graph.neighbors(c_node))
        ...     for out_neighbor in out_neighbors:
        ...         c_graph.remove_edge(c_node, out_neighbor)
        ...     in_neighbors = all_neighbors - out_neighbors
        ...     for in_neighbor in in_neighbors:
        ...         c_graph.remove_edge(in_neighbor, c_node)
        ...         for out_neighbor in out_neighbors:
        ...             c_graph.add_edge(in_neighbor, out_neighbor)
        ...     c_graph.remove_node(c_node)
        ...
        >>> nx.is_isomorphic(original_graph, c_graph)
        True

    References
    ----------
    .. [1] Maccioni, A., & Abadi, D. J. (2016, August).
       Scalable pattern matching over compressed graphs via dedensification.
       In Proceedings of the 22nd ACM SIGKDD International Conference on
       Knowledge Discovery and Data Mining (pp. 1755-1764).
       http://www.cs.umd.edu/~abadi/papers/graph-dedense.pdf
    """
    if threshold < 2:
        raise nx.NetworkXError("The degree threshold must be >= 2")

    degrees = G.in_degree if G.is_directed() else G.degree
    # Group nodes based on degree threshold
    high_degree_nodes = {n for n, d in degrees if d > threshold}
    low_degree_nodes = G.nodes() - high_degree_nodes

    auxiliary = {}
    for node in G:
        high_degree_nbrs = frozenset(high_degree_nodes & set(G[node]))
        if high_degree_nbrs:
            if high_degree_nbrs in auxiliary:
                auxiliary[high_degree_nbrs].add(node)
            else:
                auxiliary[high_degree_nbrs] = {node}

    if copy:
        G = G.copy()

    compressor_nodes = set()
    for index, (high_degree_nodes, low_degree_nodes) in enumerate(auxiliary.items()):
        low_degree_node_count = len(low_degree_nodes)
        high_degree_node_count = len(high_degree_nodes)
        old_edges = high_degree_node_count * low_degree_node_count
        new_edges = high_degree_node_count + low_degree_node_count
        if old_edges <= new_edges:
            continue
        compression_node = "".join(str(node) for node in high_degree_nodes)
        if prefix:
            compression_node = str(prefix) + compression_node
        for node in low_degree_nodes:
            for high_node in high_degree_nodes:
                if G.has_edge(node, high_node):
                    G.remove_edge(node, high_node)

            G.add_edge(node, compression_node)
        for node in high_degree_nodes:
            G.add_edge(compression_node, node)
        compressor_nodes.add(compression_node)
    return G, compressor_nodes


def _snap_build_graph(
    G,
    groups,
    node_attributes,
    edge_attributes,
    neighbor_info,
    edge_types,
    prefix,
    supernode_attribute,
    superedge_attribute,
):
    """
    Build the summary graph from the data structures produced in the SNAP aggregation algorithm

    Used in the SNAP aggregation algorithm to build the output summary graph and supernode
    lookup dictionary. This process uses the original graph and the data structures to
    create the supernodes with the correct node attributes, and the superedges with the correct
    edge attributes

    Parameters
    ----------
    G: networkx.Graph
        the original graph to be summarized
    groups: dict
        A dictionary of unique group IDs and their corresponding node groups
    node_attributes: iterable
        An iterable of the node attributes considered in the summarization process
    edge_attributes: iterable
        An iterable of the edge attributes considered in the summarization process
    neighbor_info: dict
        A data structure indicating the number of edges a node has with the
        groups in the current summarization of each edge type
    edge_types: dict
        dictionary of edges in the graph and their corresponding attributes recognized
        in the summarization
    prefix: string
        The prefix to be added to all supernodes
    supernode_attribute: str
        The node attribute for recording the supernode groupings of nodes
    superedge_attribute: str
        The edge attribute for recording the edge types represented by superedges

    Returns
    -------
    summary graph: Networkx graph
    """
    output = G.__class__()
    node_label_lookup = {}
    for index, group_id in enumerate(groups):
        group_set = groups[group_id]
        supernode = f"{prefix}{index}"
        node_label_lookup[group_id] = supernode
        supernode_attributes = {
            attr: G.nodes[next(iter(group_set))][attr] for attr in node_attributes
        }
        supernode_attributes[supernode_attribute] = group_set
        output.add_node(supernode, **supernode_attributes)

    for group_id in groups:
        group_set = groups[group_id]
        source_supernode = node_label_lookup[group_id]
        for other_group, group_edge_types in neighbor_info[
            next(iter(group_set))
        ].items():
            if group_edge_types:
                target_supernode = node_label_lookup[other_group]
                summary_graph_edge = (source_supernode, target_supernode)

                edge_types = [
                    dict(zip(edge_attributes, edge_type))
                    for edge_type in group_edge_types
                ]

                has_edge = output.has_edge(*summary_graph_edge)
                if output.is_multigraph():
                    if not has_edge:
                        for edge_type in edge_types:
                            output.add_edge(*summary_graph_edge, **edge_type)
                    elif not output.is_directed():
                        existing_edge_data = output.get_edge_data(*summary_graph_edge)
                        for edge_type in edge_types:
                            if edge_type not in existing_edge_data.values():
                                output.add_edge(*summary_graph_edge, **edge_type)
                else:
                    superedge_attributes = {superedge_attribute: edge_types}
                    output.add_edge(*summary_graph_edge, **superedge_attributes)

    return output


def _snap_eligible_group(G, groups, group_lookup, edge_types):
    """
    Determines if a group is eligible to be split.

    A group is eligible to be split if all nodes in the group have edges of the same type(s)
    with the same other groups.

    Parameters
    ----------
    G: graph
        graph to be summarized
    groups: dict
        A dictionary of unique group IDs and their corresponding node groups
    group_lookup: dict
        dictionary of nodes and their current corresponding group ID
    edge_types: dict
        dictionary of edges in the graph and their corresponding attributes recognized
        in the summarization

    Returns
    -------
    tuple: group ID to split, and neighbor-groups participation_counts data structure
    """
    nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup}
    for group_id in groups:
        current_group = groups[group_id]

        # build nbr_info for nodes in group
        for node in current_group:
            nbr_info[node] = {group_id: Counter() for group_id in groups}
            edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node)
            for edge in edges:
                neighbor = edge[1]
                edge_type = edge_types[edge]
                neighbor_group_id = group_lookup[neighbor]
                nbr_info[node][neighbor_group_id][edge_type] += 1

        # check if group_id is eligible to be split
        group_size = len(current_group)
        for other_group_id in groups:
            edge_counts = Counter()
            for node in current_group:
                edge_counts.update(nbr_info[node][other_group_id].keys())

            if not all(count == group_size for count in edge_counts.values()):
                # only the nbr_info of the returned group_id is required for handling group splits
                return group_id, nbr_info

    # if no eligible groups, complete nbr_info is calculated
    return None, nbr_info


def _snap_split(groups, neighbor_info, group_lookup, group_id):
    """
    Splits a group based on edge types and updates the groups accordingly

    Splits the group with the given group_id based on the edge types
    of the nodes so that each new grouping will all have the same
    edges with other nodes.

    Parameters
    ----------
    groups: dict
        A dictionary of unique group IDs and their corresponding node groups
    neighbor_info: dict
        A data structure indicating the number of edges a node has with the
        groups in the current summarization of each edge type
    edge_types: dict
        dictionary of edges in the graph and their corresponding attributes recognized
        in the summarization
    group_lookup: dict
        dictionary of nodes and their current corresponding group ID
    group_id: object
        ID of group to be split

    Returns
    -------
    dict
        The updated groups based on the split
    """
    new_group_mappings = defaultdict(set)
    for node in groups[group_id]:
        signature = tuple(
            frozenset(edge_types) for edge_types in neighbor_info[node].values()
        )
        new_group_mappings[signature].add(node)

    # leave the biggest new_group as the original group
    new_groups = sorted(new_group_mappings.values(), key=len)
    for new_group in new_groups[:-1]:
        # Assign unused integer as the new_group_id
        # ids are tuples, so will not interact with the original group_ids
        new_group_id = len(groups)
        groups[new_group_id] = new_group
        groups[group_id] -= new_group
        for node in new_group:
            group_lookup[node] = new_group_id

    return groups


@nx._dispatchable(
    node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True
)
def snap_aggregation(
    G,
    node_attributes,
    edge_attributes=(),
    prefix="Supernode-",
    supernode_attribute="group",
    superedge_attribute="types",
):
    """Creates a summary graph based on attributes and connectivity.

    This function uses the Summarization by Grouping Nodes on Attributes
    and Pairwise edges (SNAP) algorithm for summarizing a given
    graph by grouping nodes by node attributes and their edge attributes
    into supernodes in a summary graph. This name SNAP should not be
    confused with the Stanford Network Analysis Project (SNAP).

    Here is a high-level view of how this algorithm works:

    1) Group nodes by node attribute values.

    2) Iteratively split groups until all nodes in each group have edges
    to nodes in the same groups. That is, until all the groups are homogeneous
    in their member nodes' edges to other groups. For example,
    if all the nodes in group A only have edges to nodes in group B, then the
    group is homogeneous and does not need to be split. If all nodes in group B
    have edges with nodes in groups {A, C}, but some also have edges with other
    nodes in B, then group B is not homogeneous and needs to be split into a
    group of nodes having edges with {A, C} and a group of nodes having
    edges with {A, B, C}. This way, viewers of the summary graph can
    assume that all nodes in the group have the exact same node attributes and
    the exact same edges.

    3) Build the output summary graph, where the groups are represented by
    super-nodes. Edges represent the edges shared between all the nodes in each
    respective groups.

    A SNAP summary graph can be used to visualize graphs that are too large to display
    or visually analyze, or to efficiently identify sets of similar nodes with similar connectivity
    patterns to other sets of similar nodes based on specified node and/or edge attributes in a graph.

    Parameters
    ----------
    G: graph
        Networkx Graph to be summarized
    node_attributes: iterable, required
        An iterable of the node attributes used to group nodes in the summarization process. Nodes
        with the same values for these attributes will be grouped together in the summary graph.
    edge_attributes: iterable, optional
        An iterable of the edge attributes considered in the summarization process. If provided, unique
        combinations of the attribute values found in the graph are used to
        determine the edge types in the graph. If not provided, all edges
        are considered to be of the same type.
    prefix: str
        The prefix used to denote supernodes in the summary graph. Defaults to 'Supernode-'.
    supernode_attribute: str
        The node attribute for recording the supernode groupings of nodes. Defaults to 'group'.
    superedge_attribute: str
        The edge attribute for recording the edge types of multiple edges. Defaults to 'types'.

    Returns
    -------
    networkx.Graph: summary graph

    Examples
    --------
    SNAP aggregation takes a graph and summarizes it in the context of user-provided
    node and edge attributes such that a viewer can more easily extract and
    analyze the information represented by the graph

    >>> nodes = {
    ...     "A": dict(color="Red"),
    ...     "B": dict(color="Red"),
    ...     "C": dict(color="Red"),
    ...     "D": dict(color="Red"),
    ...     "E": dict(color="Blue"),
    ...     "F": dict(color="Blue"),
    ... }
    >>> edges = [
    ...     ("A", "E", "Strong"),
    ...     ("B", "F", "Strong"),
    ...     ("C", "E", "Weak"),
    ...     ("D", "F", "Weak"),
    ... ]
    >>> G = nx.Graph()
    >>> for node in nodes:
    ...     attributes = nodes[node]
    ...     G.add_node(node, **attributes)
    >>> for source, target, type in edges:
    ...     G.add_edge(source, target, type=type)
    >>> node_attributes = ("color",)
    >>> edge_attributes = ("type",)
    >>> summary_graph = nx.snap_aggregation(
    ...     G, node_attributes=node_attributes, edge_attributes=edge_attributes
    ... )

    Notes
    -----
    The summary graph produced is called a maximum Attribute-edge
    compatible (AR-compatible) grouping. According to [1]_, an
    AR-compatible grouping means that all nodes in each group have the same
    exact node attribute values and the same exact edges and
    edge types to one or more nodes in the same groups. The maximal
    AR-compatible grouping is the grouping with the minimal cardinality.

    The AR-compatible grouping is the most detailed grouping provided by
    any of the SNAP algorithms.

    References
    ----------
    .. [1] Y. Tian, R. A. Hankins, and J. M. Patel. Efficient aggregation
       for graph summarization. In Proc. 2008 ACM-SIGMOD Int. Conf.
       Management of Data (SIGMOD’08), pages 567–580, Vancouver, Canada,
       June 2008.
    """
    edge_types = {
        edge: tuple(attrs.get(attr) for attr in edge_attributes)
        for edge, attrs in G.edges.items()
    }
    if not G.is_directed():
        if G.is_multigraph():
            # list is needed to avoid mutating while iterating
            edges = [((v, u, k), etype) for (u, v, k), etype in edge_types.items()]
        else:
            # list is needed to avoid mutating while iterating
            edges = [((v, u), etype) for (u, v), etype in edge_types.items()]
        edge_types.update(edges)

    group_lookup = {
        node: tuple(attrs[attr] for attr in node_attributes)
        for node, attrs in G.nodes.items()
    }
    groups = defaultdict(set)
    for node, node_type in group_lookup.items():
        groups[node_type].add(node)

    eligible_group_id, nbr_info = _snap_eligible_group(
        G, groups, group_lookup, edge_types
    )
    while eligible_group_id:
        groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id)
        eligible_group_id, nbr_info = _snap_eligible_group(
            G, groups, group_lookup, edge_types
        )
    return _snap_build_graph(
        G,
        groups,
        node_attributes,
        edge_attributes,
        nbr_info,
        edge_types,
        prefix,
        supernode_attribute,
        superedge_attribute,
    )
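To make the two public entry points above concrete, here is an illustrative sketch; the toy graph, attribute values, threshold, and prefix are assumptions made only for this example.

import networkx as nx

# SNAP aggregation: group nodes by "color" and edges by "type".
G = nx.Graph()
G.add_nodes_from(["a", "b", "c", "d"], color="Red")
G.add_nodes_from(["e", "f"], color="Blue")
G.add_edges_from([("a", "e"), ("b", "e"), ("c", "f"), ("d", "f")], type="Weak")

summary = nx.snap_aggregation(G, node_attributes=("color",), edge_attributes=("type",))
for supernode, data in summary.nodes(data=True):
    # the default "group" attribute records the original nodes in each supernode
    print(supernode, sorted(data["group"]))

# Dedensification: compress edges around nodes of in-degree > 2 in a directed graph.
D = nx.gnp_random_graph(30, 0.15, directed=True, seed=5)
compressed, compressor_nodes = nx.dedensify(D, threshold=2, prefix="C-")
print(D.number_of_edges(), compressed.number_of_edges(), len(compressor_nodes))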
venv/lib/python3.10/site-packages/networkx/algorithms/swap.py
ADDED
@@ -0,0 +1,407 @@
"""Swap edges in a graph.
"""

import math

import networkx as nx
from networkx.utils import py_random_state

__all__ = ["double_edge_swap", "connected_double_edge_swap", "directed_edge_swap"]


@nx.utils.not_implemented_for("undirected")
@py_random_state(3)
@nx._dispatchable(mutates_input=True, returns_graph=True)
def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None):
    """Swap three edges in a directed graph while keeping the node degrees fixed.

    A directed edge swap swaps three edges such that a -> b -> c -> d becomes
    a -> c -> b -> d. This pattern of swapping allows all possible states with the
    same in- and out-degree distribution in a directed graph to be reached.

    If the swap would create parallel edges (e.g. if a -> c already existed in the
    previous example), another attempt is made to find a suitable trio of edges.

    Parameters
    ----------
    G : DiGraph
       A directed graph

    nswap : integer (optional, default=1)
       Number of three-edge (directed) swaps to perform

    max_tries : integer (optional, default=100)
       Maximum number of attempts to swap edges

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : DiGraph
       The graph after the edges are swapped.

    Raises
    ------
    NetworkXError
        If `G` is not directed, or
        If nswap > max_tries, or
        If there are fewer than 4 nodes or 3 edges in `G`.
    NetworkXAlgorithmError
        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made

    Notes
    -----
    Does not enforce any connectivity constraints.

    The graph G is modified in place.

    A later swap is allowed to undo a previous swap.

    References
    ----------
    .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize
           Graphical Degree Sequences of Directed Graphs.” ArXiv:0905.4913 [Math],
           Jan. 2010. https://doi.org/10.48550/arXiv.0905.4913.
           Published 2010 in Elec. J. Combinatorics (17(1)). R66.
           http://www.combinatorics.org/Volume_17/PDF/v17i1r66.pdf
    .. [2] “Combinatorics - Reaching All Possible Simple Directed Graphs with a given
           Degree Sequence with 2-Edge Swaps.” Mathematics Stack Exchange,
           https://math.stackexchange.com/questions/22272/. Accessed 30 May 2022.
    """
    if nswap > max_tries:
        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
    if len(G) < 4:
        raise nx.NetworkXError("DiGraph has fewer than four nodes.")
    if len(G.edges) < 3:
        raise nx.NetworkXError("DiGraph has fewer than 3 edges")

    # Instead of choosing uniformly at random from a generated edge list,
    # this algorithm chooses nonuniformly from the set of nodes with
    # probability weighted by degree.
    tries = 0
    swapcount = 0
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
    discrete_sequence = nx.utils.discrete_sequence

    while swapcount < nswap:
        # choose source node index from discrete distribution
        start_index = discrete_sequence(1, cdistribution=cdf, seed=seed)[0]
        start = keys[start_index]
        tries += 1

        if tries > max_tries:
            msg = f"Maximum number of swap attempts ({tries}) exceeded before desired swaps achieved ({nswap})."
            raise nx.NetworkXAlgorithmError(msg)

        # If the given node doesn't have any out edges, then there isn't anything to swap
        if G.out_degree(start) == 0:
            continue
        second = seed.choice(list(G.succ[start]))
        if start == second:
            continue

        if G.out_degree(second) == 0:
            continue
        third = seed.choice(list(G.succ[second]))
        if second == third:
            continue

        if G.out_degree(third) == 0:
            continue
        fourth = seed.choice(list(G.succ[third]))
        if third == fourth:
            continue

        if (
            third not in G.succ[start]
            and fourth not in G.succ[second]
            and second not in G.succ[third]
        ):
            # Swap nodes
            G.add_edge(start, third)
            G.add_edge(third, second)
            G.add_edge(second, fourth)
            G.remove_edge(start, second)
            G.remove_edge(second, third)
            G.remove_edge(third, fourth)
            swapcount += 1

    return G


@py_random_state(3)
@nx._dispatchable(mutates_input=True, returns_graph=True)
def double_edge_swap(G, nswap=1, max_tries=100, seed=None):
    """Swap two edges in the graph while keeping the node degrees fixed.

    A double-edge swap removes two randomly chosen edges u-v and x-y
    and creates the new edges u-x and v-y::

       u--v            u  v
              becomes  |  |
       x--y            x  y

    If either the edge u-x or v-y already exist no swap is performed
    and another attempt is made to find a suitable edge pair.

    Parameters
    ----------
    G : graph
       An undirected graph

    nswap : integer (optional, default=1)
       Number of double-edge swaps to perform

    max_tries : integer (optional)
       Maximum number of attempts to swap edges

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : graph
       The graph after double edge swaps.

    Raises
    ------
    NetworkXError
        If `G` is directed, or
        If `nswap` > `max_tries`, or
        If there are fewer than 4 nodes or 2 edges in `G`.
    NetworkXAlgorithmError
        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made

    Notes
    -----
    Does not enforce any connectivity constraints.

    The graph G is modified in place.
    """
    if G.is_directed():
        raise nx.NetworkXError(
            "double_edge_swap() not defined for directed graphs. Use directed_edge_swap instead."
        )
    if nswap > max_tries:
        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    if len(G.edges) < 2:
        raise nx.NetworkXError("Graph has fewer than 2 edges")
    # Instead of choosing uniformly at random from a generated edge list,
    # this algorithm chooses nonuniformly from the set of nodes with
    # probability weighted by degree.
    n = 0
    swapcount = 0
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
    discrete_sequence = nx.utils.discrete_sequence
    while swapcount < nswap:
        # if random.random() < 0.5: continue # trick to avoid periodicities?
        # pick two random edges without creating edge list
        # choose source node indices from discrete distribution
        (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
        if ui == xi:
            continue  # same source, skip
        u = keys[ui]  # convert index to label
        x = keys[xi]
        # choose target uniformly from neighbors
        v = seed.choice(list(G[u]))
        y = seed.choice(list(G[x]))
        if v == y:
            continue  # same target, skip
        if (x not in G[u]) and (y not in G[v]):  # don't create parallel edges
            G.add_edge(u, x)
            G.add_edge(v, y)
            G.remove_edge(u, v)
            G.remove_edge(x, y)
            swapcount += 1
        if n >= max_tries:
            e = (
                f"Maximum number of swap attempts ({n}) exceeded "
                f"before desired swaps achieved ({nswap})."
            )
            raise nx.NetworkXAlgorithmError(e)
        n += 1
    return G


@py_random_state(3)
@nx._dispatchable(mutates_input=True)
def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
    """Attempts the specified number of double-edge swaps in the graph `G`.

    A double-edge swap removes two randomly chosen edges `(u, v)` and `(x,
    y)` and creates the new edges `(u, x)` and `(v, y)`::

       u--v            u  v
              becomes  |  |
       x--y            x  y

    If either `(u, x)` or `(v, y)` already exist, then no swap is performed
    so the actual number of swapped edges is always *at most* `nswap`.

    Parameters
    ----------
    G : graph
       An undirected graph

    nswap : integer (optional, default=1)
       Number of double-edge swaps to perform

    _window_threshold : integer

       The window size below which connectedness of the graph will be checked
       after each swap.

       The "window" in this function is a dynamically updated integer that
       represents the number of swap attempts to make before checking if the
       graph remains connected. It is an optimization used to decrease the
       running time of the algorithm in exchange for increased complexity of
       implementation.

       If the window size is below this threshold, then the algorithm checks
       after each swap if the graph remains connected by checking if there is a
       path joining the two nodes whose edge was just removed. If the window
       size is above this threshold, then the algorithm performs all the
       swaps in the window and only then checks if the graph is still connected.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    int
       The number of successful swaps

    Raises
    ------

    NetworkXError

       If the input graph is not connected, or if the graph has fewer than four
       nodes.

    Notes
    -----

    The initial graph `G` must be connected, and the resulting graph is
    connected. The graph `G` is modified in place.

    References
    ----------
    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
           The Markov chain simulation method for generating connected
           power law random graphs, 2003.
           http://citeseer.ist.psu.edu/gkantsidis03markov.html
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected")
    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    n = 0
    swapcount = 0
    deg = G.degree()
    # Label key for nodes
    dk = [n for n, d in G.degree()]
    cdf = nx.utils.cumulative_distribution([d for n, d in G.degree()])
    discrete_sequence = nx.utils.discrete_sequence
    window = 1
    while n < nswap:
        wcount = 0
        swapped = []
        # If the window is small, we just check each time whether the graph is
        # connected by checking if the nodes that were just separated are still
        # connected.
        if window < _window_threshold:
            # This Boolean keeps track of whether there was a failure or not.
            fail = False
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = seed.choice(list(G.neighbors(u)))
                y = seed.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                # If G remains connected...
                if nx.has_path(G, u, v):
                    wcount += 1
                # Otherwise, undo the changes.
                else:
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                    fail = True
            # If one of the swaps failed, reduce the window size.
            if fail:
                window = math.ceil(window / 2)
            else:
                window += 1
        # If the window is large, then there is a good chance that a bunch of
        # swaps will work. It's quicker to do all those swaps first and then
        # check if the graph remains connected.
        else:
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = seed.choice(list(G.neighbors(u)))
                y = seed.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                wcount += 1
            # If the graph remains connected, increase the window size.
            if nx.is_connected(G):
                window += 1
            # Otherwise, undo the changes from the previous window and decrease
            # the window size.
            else:
                while swapped:
                    (u, v, x, y) = swapped.pop()
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                window = math.ceil(window / 2)
    return swapcount
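A short, illustrative sketch of how these routines are typically used to randomize a graph while preserving its degree sequence; the specific graphs and parameter values are arbitrary choices for the example.

import networkx as nx

G = nx.barbell_graph(5, 2)
before = sorted(d for _, d in G.degree())
nx.double_edge_swap(G, nswap=10, max_tries=1000, seed=1)  # modifies G in place
after = sorted(d for _, d in G.degree())
assert before == after  # swaps never change the degree sequence

# Directed variant: note that nswap/max_tries/seed are keyword-only here.
D = nx.gnp_random_graph(20, 0.2, directed=True, seed=2)
nx.directed_edge_swap(D, nswap=2, max_tries=1000, seed=3)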
venv/lib/python3.10/site-packages/networkx/algorithms/threshold.py
ADDED
@@ -0,0 +1,979 @@
+"""
+Threshold Graphs - Creation, manipulation and identification.
+"""
+from math import sqrt
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = ["is_threshold_graph", "find_threshold_graph"]
+
+
+@nx._dispatchable
+def is_threshold_graph(G):
+    """
+    Returns `True` if `G` is a threshold graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph instance
+        An instance of `Graph`, `DiGraph`, `MultiGraph` or `MultiDiGraph`
+
+    Returns
+    -------
+    bool
+        `True` if `G` is a threshold graph, `False` otherwise.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.threshold import is_threshold_graph
+    >>> G = nx.path_graph(3)
+    >>> is_threshold_graph(G)
+    True
+    >>> G = nx.barbell_graph(3, 3)
+    >>> is_threshold_graph(G)
+    False
+
+    References
+    ----------
+    .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph
+    """
+    return is_threshold_sequence([d for n, d in G.degree()])
+
+
+def is_threshold_sequence(degree_sequence):
+    """
+    Returns True if the sequence is a threshold degree sequence.
+
+    Uses the property that a threshold graph must be constructed by
+    adding either dominating or isolated nodes. Thus, it can be
+    deconstructed iteratively by removing a node of degree zero or a
+    node that connects to the remaining nodes. If this deconstruction
+    fails then the sequence is not a threshold sequence.
+    """
+    ds = degree_sequence[:]  # get a copy so we don't destroy original
+    ds.sort()
+    while ds:
+        if ds[0] == 0:  # if isolated node
+            ds.pop(0)  # remove it
+            continue
+        if ds[-1] != len(ds) - 1:  # is the largest degree node dominating?
+            return False  # no, not a threshold degree sequence
+        ds.pop()  # yes, largest is the dominating node
+        ds = [d - 1 for d in ds]  # remove it and decrement all degrees
+    return True
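Editor's aside: the deconstruction test above is easy to work by hand. A minimal sketch (not part of the file being added; the two degree sequences are arbitrary examples):

    # P3 has degree sequence [1, 2, 1]: sorted -> [1, 1, 2]; the largest degree
    # equals len-1, so pop it and decrement -> [0, 0]; both leftovers are
    # isolated, the list empties out, so the sequence is a threshold sequence.
    from networkx.algorithms.threshold import is_threshold_sequence

    assert is_threshold_sequence([1, 2, 1])        # path graph P3
    assert not is_threshold_sequence([1, 1, 1, 1])  # 2K2: no dominating node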
+
+
+def creation_sequence(degree_sequence, with_labels=False, compact=False):
+    """
+    Determines the creation sequence for the given threshold degree sequence.
+
+    The creation sequence is a list of single characters 'd'
+    or 'i': 'd' for dominating or 'i' for isolated vertices.
+    Dominating vertices are connected to all vertices present when it
+    is added. The first node added is by convention 'd'.
+    This list can be converted to a string if desired using "".join(cs)
+
+    If with_labels==True:
+    Returns a list of 2-tuples containing the vertex number
+    and a character 'd' or 'i' which describes the type of vertex.
+
+    If compact==True:
+    Returns the creation sequence in a compact form that is the number
+    of 'i's and 'd's alternating.
+    Examples:
+    [1,2,2,3] represents d,i,i,d,d,i,i,i
+    [3,1,2] represents d,d,d,i,d,d
+
+    Notice that the first number is the first vertex to be used for
+    construction and so is always 'd'.
+
+    with_labels and compact cannot both be True.
+
+    Returns None if the sequence is not a threshold sequence
+    """
+    if with_labels and compact:
+        raise ValueError("compact sequences cannot be labeled")
+
+    # make an indexed copy
+    if isinstance(degree_sequence, dict):  # labeled degree sequence
+        ds = [[degree, label] for (label, degree) in degree_sequence.items()]
+    else:
+        ds = [[d, i] for i, d in enumerate(degree_sequence)]
+    ds.sort()
+    cs = []  # creation sequence
+    while ds:
+        if ds[0][0] == 0:  # isolated node
+            (d, v) = ds.pop(0)
+            if len(ds) > 0:  # make sure we start with a d
+                cs.insert(0, (v, "i"))
+            else:
+                cs.insert(0, (v, "d"))
+            continue
+        if ds[-1][0] != len(ds) - 1:  # Not dominating node
+            return None  # not a threshold degree sequence
+        (d, v) = ds.pop()
+        cs.insert(0, (v, "d"))
+        ds = [[d[0] - 1, d[1]] for d in ds]  # decrement due to removing node
+
+    if with_labels:
+        return cs
+    if compact:
+        return make_compact(cs)
+    return [v[1] for v in cs]  # not labeled
+
+
+def make_compact(creation_sequence):
+    """
+    Returns the creation sequence in a compact form
+    that is the number of 'i's and 'd's alternating.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.threshold import make_compact
+    >>> make_compact(["d", "i", "i", "d", "d", "i", "i", "i"])
+    [1, 2, 2, 3]
+    >>> make_compact(["d", "d", "d", "i", "d", "d"])
+    [3, 1, 2]
+
+    Notice that the first number is the first vertex
+    to be used for construction and so is always 'd'.
+
+    Labeled creation sequences lose their labels in the
+    compact representation.
+
+    >>> make_compact([3, 1, 2])
+    [3, 1, 2]
+    """
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        cs = creation_sequence[:]
+    elif isinstance(first, tuple):  # labeled creation sequence
+        cs = [s[1] for s in creation_sequence]
+    elif isinstance(first, int):  # compact creation sequence
+        return creation_sequence
+    else:
+        raise TypeError("Not a valid creation sequence type")
+
+    ccs = []
+    count = 1  # count the run lengths of d's or i's.
+    for i in range(1, len(cs)):
+        if cs[i] == cs[i - 1]:
+            count += 1
+        else:
+            ccs.append(count)
+            count = 1
+    ccs.append(count)  # don't forget the last one
+    return ccs
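Editor's aside: the compact form produced above can be expanded back with uncompact, defined just below. A small round-trip sketch (not part of the file being added; the sequence is an arbitrary example):

    from networkx.algorithms.threshold import make_compact, uncompact

    cs = ["d", "i", "i", "d", "d", "i", "i", "i"]
    assert make_compact(cs) == [1, 2, 2, 3]   # run lengths of alternating d's and i's
    assert uncompact([1, 2, 2, 3]) == cs      # expands back to the full sequence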
+
+
+def uncompact(creation_sequence):
+    """
+    Converts a compact creation sequence for a threshold
+    graph to a standard creation sequence (unlabeled).
+    If the creation_sequence is already standard, return it.
+    See creation_sequence.
+    """
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        return creation_sequence
+    elif isinstance(first, tuple):  # labeled creation sequence
+        return creation_sequence
+    elif isinstance(first, int):  # compact creation sequence
+        ccscopy = creation_sequence[:]
+    else:
+        raise TypeError("Not a valid creation sequence type")
+    cs = []
+    while ccscopy:
+        cs.extend(ccscopy.pop(0) * ["d"])
+        if ccscopy:
+            cs.extend(ccscopy.pop(0) * ["i"])
+    return cs
+
+
+def creation_sequence_to_weights(creation_sequence):
+    """
+    Returns a list of node weights which create the threshold
+    graph designated by the creation sequence. The weights
+    are scaled so that the threshold is 1.0. The order of the
+    nodes is the same as that in the creation sequence.
+    """
+    # Turn input sequence into a labeled creation sequence
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        if isinstance(creation_sequence, list):
+            wseq = creation_sequence[:]
+        else:
+            wseq = list(creation_sequence)  # string like 'ddidid'
+    elif isinstance(first, tuple):  # labeled creation sequence
+        wseq = [v[1] for v in creation_sequence]
+    elif isinstance(first, int):  # compact creation sequence
+        wseq = uncompact(creation_sequence)
+    else:
+        raise TypeError("Not a valid creation sequence type")
+    # pass through twice--first backwards
+    wseq.reverse()
+    w = 0
+    prev = "i"
+    for j, s in enumerate(wseq):
+        if s == "i":
+            wseq[j] = w
+            prev = s
+        elif prev == "i":
+            prev = s
+            w += 1
+    wseq.reverse()  # now pass through forwards
+    for j, s in enumerate(wseq):
+        if s == "d":
+            wseq[j] = w
+            prev = s
+        elif prev == "d":
+            prev = s
+            w += 1
+    # Now scale weights
+    if prev == "d":
+        w += 1
+    wscale = 1 / w
+    return [ww * wscale for ww in wseq]
+    # return wseq
+
+
+def weights_to_creation_sequence(
+    weights, threshold=1, with_labels=False, compact=False
+):
+    """
+    Returns a creation sequence for a threshold graph
+    determined by the weights and threshold given as input.
+    If the sum of two node weights is greater than the
+    threshold value, an edge is created between these nodes.
+
+    The creation sequence is a list of single characters 'd'
+    or 'i': 'd' for dominating or 'i' for isolated vertices.
+    Dominating vertices are connected to all vertices present
+    when it is added. The first node added is by convention 'd'.
+
+    If with_labels==True:
+    Returns a list of 2-tuples containing the vertex number
+    and a character 'd' or 'i' which describes the type of vertex.
+
+    If compact==True:
+    Returns the creation sequence in a compact form that is the number
+    of 'i's and 'd's alternating.
+    Examples:
+    [1,2,2,3] represents d,i,i,d,d,i,i,i
+    [3,1,2] represents d,d,d,i,d,d
+
+    Notice that the first number is the first vertex to be used for
+    construction and so is always 'd'.
+
+    with_labels and compact cannot both be True.
+    """
+    if with_labels and compact:
+        raise ValueError("compact sequences cannot be labeled")
+
+    # make an indexed copy
+    if isinstance(weights, dict):  # labeled weights
+        wseq = [[w, label] for (label, w) in weights.items()]
+    else:
+        wseq = [[w, i] for i, w in enumerate(weights)]
+    wseq.sort()
+    cs = []  # creation sequence
+    cutoff = threshold - wseq[-1][0]
+    while wseq:
+        if wseq[0][0] < cutoff:  # isolated node
+            (w, label) = wseq.pop(0)
+            cs.append((label, "i"))
+        else:
+            (w, label) = wseq.pop()
+            cs.append((label, "d"))
+            cutoff = threshold - wseq[-1][0]
+        if len(wseq) == 1:  # make sure we start with a d
+            (w, label) = wseq.pop()
+            cs.append((label, "d"))
+    # put in correct order
+    cs.reverse()
+
+    if with_labels:
+        return cs
+    if compact:
+        return make_compact(cs)
+    return [v[1] for v in cs]  # not labeled
+
+
+# Manipulating NetworkX.Graphs in context of threshold graphs
+@nx._dispatchable(graphs=None, returns_graph=True)
+def threshold_graph(creation_sequence, create_using=None):
+    """
+    Create a threshold graph from the creation sequence or compact
+    creation_sequence.
+
+    The input sequence can be a
+
+    creation sequence (e.g. ['d','i','d','d','d','i'])
+    labeled creation sequence (e.g. [(0,'d'),(2,'d'),(1,'i')])
+    compact creation sequence (e.g. [2,1,1,2,0])
+
+    Use cs=creation_sequence(degree_sequence,labeled=True)
+    to convert a degree sequence to a creation sequence.
+
+    Returns None if the sequence is not valid
+    """
+    # Turn input sequence into a labeled creation sequence
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        ci = list(enumerate(creation_sequence))
+    elif isinstance(first, tuple):  # labeled creation sequence
+        ci = creation_sequence[:]
+    elif isinstance(first, int):  # compact creation sequence
+        cs = uncompact(creation_sequence)
+        ci = list(enumerate(cs))
+    else:
+        print("not a valid creation sequence type")
+        return None
+
+    G = nx.empty_graph(0, create_using)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    G.name = "Threshold Graph"
+
+    # add nodes and edges
+    # if type is 'i' just add nodea
+    # if type is a d connect to everything previous
+    while ci:
+        (v, node_type) = ci.pop(0)
+        if node_type == "d":  # dominating type, connect to all existing nodes
+            # We use `for u in list(G):` instead of
+            # `for u in G:` because we edit the graph `G` in
+            # the loop. Hence using an iterator will result in
+            # `RuntimeError: dictionary changed size during iteration`
+            for u in list(G):
+                G.add_edge(v, u)
+        G.add_node(v)
+    return G
+
+
+@nx._dispatchable
+def find_alternating_4_cycle(G):
+    """
+    Returns False if there aren't any alternating 4 cycles.
+    Otherwise returns the cycle as [a,b,c,d] where (a,b)
+    and (c,d) are edges and (a,c) and (b,d) are not.
+    """
+    for u, v in G.edges():
+        for w in G.nodes():
+            if not G.has_edge(u, w) and u != w:
+                for x in G.neighbors(w):
+                    if not G.has_edge(v, x) and v != x:
+                        return [u, v, w, x]
+    return False
+
+
+@nx._dispatchable(returns_graph=True)
+def find_threshold_graph(G, create_using=None):
+    """
+    Returns a threshold subgraph that is close to largest in `G`.
+
+    The threshold graph will contain the largest degree node in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph instance
+        An instance of `Graph`, or `MultiDiGraph`
+    create_using : NetworkX graph class or `None` (default), optional
+        Type of graph to use when constructing the threshold graph.
+        If `None`, infer the appropriate graph type from the input.
+
+    Returns
+    -------
+    graph :
+        A graph instance representing the threshold graph
+
+    Examples
+    --------
+    >>> from networkx.algorithms.threshold import find_threshold_graph
+    >>> G = nx.barbell_graph(3, 3)
+    >>> T = find_threshold_graph(G)
+    >>> T.nodes  # may vary
+    NodeView((7, 8, 5, 6))
+
+    References
+    ----------
+    .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph
+    """
+    return threshold_graph(find_creation_sequence(G), create_using)
+
+
+@nx._dispatchable
+def find_creation_sequence(G):
+    """
+    Find a threshold subgraph that is close to largest in G.
+    Returns the labeled creation sequence of that threshold graph.
+    """
+    cs = []
+    # get a local pointer to the working part of the graph
+    H = G
+    while H.order() > 0:
+        # get new degree sequence on subgraph
+        dsdict = dict(H.degree())
+        ds = [(d, v) for v, d in dsdict.items()]
+        ds.sort()
+        # Update threshold graph nodes
+        if ds[-1][0] == 0:  # all are isolated
+            cs.extend(zip(dsdict, ["i"] * (len(ds) - 1) + ["d"]))
+            break  # Done!
+        # pull off isolated nodes
+        while ds[0][0] == 0:
+            (d, iso) = ds.pop(0)
+            cs.append((iso, "i"))
+        # find new biggest node
+        (d, bigv) = ds.pop()
+        # add edges of star to t_g
+        cs.append((bigv, "d"))
+        # form subgraph of neighbors of big node
+        H = H.subgraph(H.neighbors(bigv))
+    cs.reverse()
+    return cs
+
+
+# Properties of Threshold Graphs
+def triangles(creation_sequence):
+    """
+    Compute number of triangles in the threshold graph with the
+    given creation sequence.
+    """
+    # shortcut algorithm that doesn't require computing number
+    # of triangles at each node.
+    cs = creation_sequence  # alias
+    dr = cs.count("d")  # number of d's in sequence
+    ntri = dr * (dr - 1) * (dr - 2) / 6  # number of triangles in clique of nd d's
+    # now add dr choose 2 triangles for every 'i' in sequence where
+    # dr is the number of d's to the right of the current i
+    for i, typ in enumerate(cs):
+        if typ == "i":
+            ntri += dr * (dr - 1) / 2
+        else:
+            dr -= 1
+    return ntri
+
+
+def triangle_sequence(creation_sequence):
+    """
+    Return triangle sequence for the given threshold graph creation sequence.
+
+    """
+    cs = creation_sequence
+    seq = []
+    dr = cs.count("d")  # number of d's to the right of the current pos
+    dcur = (dr - 1) * (dr - 2) // 2  # number of triangles through a node of clique dr
+    irun = 0  # number of i's in the last run
+    drun = 0  # number of d's in the last run
+    for i, sym in enumerate(cs):
+        if sym == "d":
+            drun += 1
+            tri = dcur + (dr - 1) * irun  # new triangles at this d
+        else:  # cs[i]="i":
+            if prevsym == "d":  # new string of i's
+                dcur += (dr - 1) * irun  # accumulate shared shortest paths
+                irun = 0  # reset i run counter
+                dr -= drun  # reduce number of d's to right
+                drun = 0  # reset d run counter
+            irun += 1
+            tri = dr * (dr - 1) // 2  # new triangles at this i
+        seq.append(tri)
+        prevsym = sym
+    return seq
+
+
+def cluster_sequence(creation_sequence):
+    """
+    Return cluster sequence for the given threshold graph creation sequence.
+    """
+    triseq = triangle_sequence(creation_sequence)
+    degseq = degree_sequence(creation_sequence)
+    cseq = []
+    for i, deg in enumerate(degseq):
+        tri = triseq[i]
+        if deg <= 1:  # isolated vertex or single pair gets cc 0
+            cseq.append(0)
+            continue
+        max_size = (deg * (deg - 1)) // 2
+        cseq.append(tri / max_size)
+    return cseq
+
+
+def degree_sequence(creation_sequence):
+    """
+    Return degree sequence for the threshold graph with the given
+    creation sequence
+    """
+    cs = creation_sequence  # alias
+    seq = []
+    rd = cs.count("d")  # number of d to the right
+    for i, sym in enumerate(cs):
+        if sym == "d":
+            rd -= 1
+            seq.append(rd + i)
+        else:
+            seq.append(rd)
+    return seq
+
+
+def density(creation_sequence):
+    """
+    Return the density of the graph with this creation_sequence.
+    The density is the fraction of possible edges present.
+    """
+    N = len(creation_sequence)
+    two_size = sum(degree_sequence(creation_sequence))
+    two_possible = N * (N - 1)
+    den = two_size / two_possible
+    return den
+
+
+def degree_correlation(creation_sequence):
+    """
+    Return the degree-degree correlation over all edges.
+    """
+    cs = creation_sequence
+    s1 = 0  # deg_i*deg_j
+    s2 = 0  # deg_i^2+deg_j^2
+    s3 = 0  # deg_i+deg_j
+    m = 0  # number of edges
+    rd = cs.count("d")  # number of d nodes to the right
+    rdi = [i for i, sym in enumerate(cs) if sym == "d"]  # index of "d"s
+    ds = degree_sequence(cs)
+    for i, sym in enumerate(cs):
+        if sym == "d":
+            if i != rdi[0]:
+                print("Logic error in degree_correlation", i, rdi)
+                raise ValueError
+            rdi.pop(0)
+        degi = ds[i]
+        for dj in rdi:
+            degj = ds[dj]
+            s1 += degj * degi
+            s2 += degi**2 + degj**2
+            s3 += degi + degj
+            m += 1
+    denom = 2 * m * s2 - s3 * s3
+    numer = 4 * m * s1 - s3 * s3
+    if denom == 0:
+        if numer == 0:
+            return 1
+        raise ValueError(f"Zero Denominator but Numerator is {numer}")
+    return numer / denom
+
+
+def shortest_path(creation_sequence, u, v):
+    """
+    Find the shortest path between u and v in a
+    threshold graph G with the given creation_sequence.
+
+    For an unlabeled creation_sequence, the vertices
+    u and v must be integers in (0,len(sequence)) referring
+    to the position of the desired vertices in the sequence.
+
+    For a labeled creation_sequence, u and v are labels of vertices.
+
+    Use cs=creation_sequence(degree_sequence,with_labels=True)
+    to convert a degree sequence to a creation sequence.
+
+    Returns a list of vertices from u to v.
+    Example: if they are neighbors, it returns [u,v]
+    """
+    # Turn input sequence into a labeled creation sequence
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        cs = [(i, creation_sequence[i]) for i in range(len(creation_sequence))]
+    elif isinstance(first, tuple):  # labeled creation sequence
+        cs = creation_sequence[:]
+    elif isinstance(first, int):  # compact creation sequence
+        ci = uncompact(creation_sequence)
+        cs = [(i, ci[i]) for i in range(len(ci))]
+    else:
+        raise TypeError("Not a valid creation sequence type")
+
+    verts = [s[0] for s in cs]
+    if v not in verts:
+        raise ValueError(f"Vertex {v} not in graph from creation_sequence")
+    if u not in verts:
+        raise ValueError(f"Vertex {u} not in graph from creation_sequence")
+    # Done checking
+    if u == v:
+        return [u]
+
+    uindex = verts.index(u)
+    vindex = verts.index(v)
+    bigind = max(uindex, vindex)
+    if cs[bigind][1] == "d":
+        return [u, v]
+    # must be that cs[bigind][1]=='i'
+    cs = cs[bigind:]
+    while cs:
+        vert = cs.pop()
+        if vert[1] == "d":
+            return [u, vert[0], v]
+    # All after u are type 'i' so no connection
+    return -1
+
+
+def shortest_path_length(creation_sequence, i):
+    """
+    Return the shortest path length from indicated node to
+    every other node for the threshold graph with the given
+    creation sequence.
+    Node is indicated by index i in creation_sequence unless
+    creation_sequence is labeled in which case, i is taken to
+    be the label of the node.
+
+    Paths lengths in threshold graphs are at most 2.
+    Length to unreachable nodes is set to -1.
+    """
+    # Turn input sequence into a labeled creation sequence
+    first = creation_sequence[0]
+    if isinstance(first, str):  # creation sequence
+        if isinstance(creation_sequence, list):
+            cs = creation_sequence[:]
+        else:
+            cs = list(creation_sequence)
+    elif isinstance(first, tuple):  # labeled creation sequence
+        cs = [v[1] for v in creation_sequence]
+        i = [v[0] for v in creation_sequence].index(i)
+    elif isinstance(first, int):  # compact creation sequence
+        cs = uncompact(creation_sequence)
+    else:
+        raise TypeError("Not a valid creation sequence type")
+
+    # Compute
+    N = len(cs)
+    spl = [2] * N  # length 2 to every node
+    spl[i] = 0  # except self which is 0
+    # 1 for all d's to the right
+    for j in range(i + 1, N):
+        if cs[j] == "d":
+            spl[j] = 1
+    if cs[i] == "d":  # 1 for all nodes to the left
+        for j in range(i):
+            spl[j] = 1
+    # and -1 for any trailing i to indicate unreachable
+    for j in range(N - 1, 0, -1):
+        if cs[j] == "d":
+            break
+        spl[j] = -1
+    return spl
+
+
+def betweenness_sequence(creation_sequence, normalized=True):
+    """
+    Return betweenness for the threshold graph with the given creation
+    sequence. The result is unscaled. To scale the values
+    to the interval [0,1] divide by (n-1)*(n-2).
+    """
+    cs = creation_sequence
+    seq = []  # betweenness
+    lastchar = "d"  # first node is always a 'd'
+    dr = float(cs.count("d"))  # number of d's to the right of current pos
+    irun = 0  # number of i's in the last run
+    drun = 0  # number of d's in the last run
+    dlast = 0.0  # betweenness of last d
+    for i, c in enumerate(cs):
+        if c == "d":  # cs[i]=="d":
+            # betweenness = amt shared with earlier d's and i's
+            #             + new isolated nodes covered
+            #             + new paths to all previous nodes
+            b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr
+            drun += 1  # update counter
+        else:  # cs[i]="i":
+            if lastchar == "d":  # if this is a new run of i's
+                dlast = b  # accumulate betweenness
+                dr -= drun  # update number of d's to the right
+                drun = 0  # reset d counter
+                irun = 0  # reset i counter
+            b = 0  # isolated nodes have zero betweenness
+            irun += 1  # add another i to the run
+        seq.append(float(b))
+        lastchar = c
+
+    # normalize by the number of possible shortest paths
+    if normalized:
+        order = len(cs)
+        scale = 1.0 / ((order - 1) * (order - 2))
+        seq = [s * scale for s in seq]
+
+    return seq
+
+
+def eigenvectors(creation_sequence):
+    """
+    Return a 2-tuple of Laplacian eigenvalues and eigenvectors
+    for the threshold network with creation_sequence.
+    The first value is a list of eigenvalues.
+    The second value is a list of eigenvectors.
+    The lists are in the same order so corresponding eigenvectors
+    and eigenvalues are in the same position in the two lists.
+
+    Notice that the order of the eigenvalues returned by eigenvalues(cs)
+    may not correspond to the order of these eigenvectors.
+    """
+    ccs = make_compact(creation_sequence)
+    N = sum(ccs)
+    vec = [0] * N
+    val = vec[:]
+    # get number of type d nodes to the right (all for first node)
+    dr = sum(ccs[::2])
+
+    nn = ccs[0]
+    vec[0] = [1.0 / sqrt(N)] * N
+    val[0] = 0
+    e = dr
+    dr -= nn
+    type_d = True
+    i = 1
+    dd = 1
+    while dd < nn:
+        scale = 1.0 / sqrt(dd * dd + i)
+        vec[i] = i * [-scale] + [dd * scale] + [0] * (N - i - 1)
+        val[i] = e
+        i += 1
+        dd += 1
+    if len(ccs) == 1:
+        return (val, vec)
+    for nn in ccs[1:]:
+        scale = 1.0 / sqrt(nn * i * (i + nn))
+        vec[i] = i * [-nn * scale] + nn * [i * scale] + [0] * (N - i - nn)
+        # find eigenvalue
+        type_d = not type_d
+        if type_d:
+            e = i + dr
+            dr -= nn
+        else:
+            e = dr
+        val[i] = e
+        st = i
+        i += 1
+        dd = 1
+        while dd < nn:
+            scale = 1.0 / sqrt(i - st + dd * dd)
+            vec[i] = [0] * st + (i - st) * [-scale] + [dd * scale] + [0] * (N - i - 1)
+            val[i] = e
+            i += 1
+            dd += 1
+    return (val, vec)
+
+
+def spectral_projection(u, eigenpairs):
+    """
+    Returns the coefficients of each eigenvector
+    in a projection of the vector u onto the normalized
+    eigenvectors which are contained in eigenpairs.
+
+    eigenpairs should be a list of two objects. The
+    first is a list of eigenvalues and the second a list
+    of eigenvectors. The eigenvectors should be lists.
+
+    There's not a lot of error checking on lengths of
+    arrays, etc. so be careful.
+    """
+    coeff = []
+    evect = eigenpairs[1]
+    for ev in evect:
+        c = sum(evv * uv for (evv, uv) in zip(ev, u))
+        coeff.append(c)
+    return coeff
+
+
+def eigenvalues(creation_sequence):
+    """
+    Return sequence of eigenvalues of the Laplacian of the threshold
+    graph for the given creation_sequence.
+
+    Based on the Ferrer's diagram method. The spectrum is integral
+    and is the conjugate of the degree sequence.
+
+    See::
+
+      @Article{degree-merris-1994,
+        author = {Russel Merris},
+        title = {Degree maximal graphs are Laplacian integral},
+        journal = {Linear Algebra Appl.},
+        year = {1994},
+        volume = {199},
+        pages = {381--389},
+      }
+
+    """
+    degseq = degree_sequence(creation_sequence)
+    degseq.sort()
+    eiglist = []  # zero is always one eigenvalue
+    eig = 0
+    row = len(degseq)
+    bigdeg = degseq.pop()
+    while row:
+        if bigdeg < row:
+            eiglist.append(eig)
+            row -= 1
+        else:
+            eig += 1
+            if degseq:
+                bigdeg = degseq.pop()
+            else:
+                bigdeg = 0
+    return eiglist
+
+
+# Threshold graph creation routines
+
+
+@py_random_state(2)
+def random_threshold_sequence(n, p, seed=None):
+    """
+    Create a random threshold sequence of size n.
+    A creation sequence is built by randomly choosing d's with
+    probability p and i's with probability 1-p.
+
+    s=nx.random_threshold_sequence(10,0.5)
+
+    returns a threshold sequence of length 10 with equal
+    probably of an i or a d at each position.
+
+    A "random" threshold graph can be built with
+
+    G=nx.threshold_graph(s)
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+    """
+    if not (0 <= p <= 1):
+        raise ValueError("p must be in [0,1]")
+
+    cs = ["d"]  # threshold sequences always start with a d
+    for i in range(1, n):
+        if seed.random() < p:
+            cs.append("d")
+        else:
+            cs.append("i")
+    return cs
+
+
+# maybe *_d_threshold_sequence routines should
+# be (or be called from) a single routine with a more descriptive name
+# and a keyword parameter?
+def right_d_threshold_sequence(n, m):
+    """
+    Create a skewed threshold graph with a given number
+    of vertices (n) and a given number of edges (m).
+
+    The routine returns an unlabeled creation sequence
+    for the threshold graph.
+
+    FIXME: describe algorithm
+
+    """
+    cs = ["d"] + ["i"] * (n - 1)  # create sequence with n insolated nodes
+
+    # m <n : not enough edges, make disconnected
+    if m < n:
+        cs[m] = "d"
+        return cs
+
+    # too many edges
+    if m > n * (n - 1) / 2:
+        raise ValueError("Too many edges for this many nodes.")
+
+    # connected case m >n-1
+    ind = n - 1
+    sum = n - 1
+    while sum < m:
+        cs[ind] = "d"
+        ind -= 1
+        sum += ind
+    ind = m - (sum - ind)
+    cs[ind] = "d"
+    return cs
+
+
+def left_d_threshold_sequence(n, m):
+    """
+    Create a skewed threshold graph with a given number
+    of vertices (n) and a given number of edges (m).
+
+    The routine returns an unlabeled creation sequence
+    for the threshold graph.
+
+    FIXME: describe algorithm
+
+    """
+    cs = ["d"] + ["i"] * (n - 1)  # create sequence with n insolated nodes
+
+    # m <n : not enough edges, make disconnected
+    if m < n:
+        cs[m] = "d"
+        return cs
+
+    # too many edges
+    if m > n * (n - 1) / 2:
+        raise ValueError("Too many edges for this many nodes.")
+
+    # Connected case when M>N-1
+    cs[n - 1] = "d"
+    sum = n - 1
+    ind = 1
+    while sum < m:
+        cs[ind] = "d"
+        sum += ind
+        ind += 1
+    if sum > m:  # be sure not to change the first vertex
+        cs[sum - m] = "i"
+    return cs
+
+
+@py_random_state(3)
+def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None):
+    """
+    Perform a "swap" operation on a threshold sequence.
+
+    The swap preserves the number of nodes and edges
+    in the graph for the given sequence.
+    The resulting sequence is still a threshold sequence.
+
+    Perform one split and one combine operation on the
+    'd's of a creation sequence for a threshold graph.
+    This operation maintains the number of nodes and edges
+    in the graph, but shifts the edges from node to node
+    maintaining the threshold quality of the graph.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+    """
+    # preprocess the creation sequence
+    dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == "d"]
+    # split
+    if seed.random() < p_split:
+        choice = seed.choice(dlist)
+        split_to = seed.choice(range(choice))
+        flip_side = choice - split_to
+        if split_to != flip_side and cs[split_to] == "i" and cs[flip_side] == "i":
+            cs[choice] = "i"
+            cs[split_to] = "d"
+            cs[flip_side] = "d"
+            dlist.remove(choice)
+            # don't add or combine may reverse this action
+            # dlist.extend([split_to,flip_side])
+            # print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side)
+    # combine
+    if seed.random() < p_combine and dlist:
+        first_choice = seed.choice(dlist)
+        second_choice = seed.choice(dlist)
+        target = first_choice + second_choice
+        if target >= len(cs) or cs[target] == "d" or first_choice == second_choice:
+            return cs
+        # OK to combine
+        cs[first_choice] = "i"
+        cs[second_choice] = "i"
+        cs[target] = "d"
+        # print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target)
+
+    return cs
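Editor's aside: a short sketch tying the pieces of threshold.py together, not part of the file added above; the creation sequence used is an arbitrary illustration:

    from networkx.algorithms.threshold import (
        creation_sequence,
        is_threshold_graph,
        threshold_graph,
    )

    # build a graph from a creation sequence, then recover that sequence
    # from the graph's degree sequence
    G = threshold_graph(["d", "i", "i", "d", "d"])
    assert is_threshold_graph(G)
    degs = [d for _, d in G.degree()]
    assert creation_sequence(degs) == ["d", "i", "i", "d", "d"]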
venv/lib/python3.10/site-packages/networkx/algorithms/tournament.py
ADDED
@@ -0,0 +1,406 @@
+"""Functions concerning tournament graphs.
+
+A `tournament graph`_ is a complete oriented graph. In other words, it
+is a directed graph in which there is exactly one directed edge joining
+each pair of distinct nodes. For each function in this module that
+accepts a graph as input, you must provide a tournament graph. The
+responsibility is on the caller to ensure that the graph is a tournament
+graph:
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    >>> nx.is_tournament(G)
+    True
+
+To access the functions in this module, you must access them through the
+:mod:`networkx.tournament` module::
+
+    >>> nx.tournament.is_reachable(G, 0, 1)
+    True
+
+.. _tournament graph: https://en.wikipedia.org/wiki/Tournament_%28graph_theory%29
+
+"""
+from itertools import combinations
+
+import networkx as nx
+from networkx.algorithms.simple_paths import is_simple_path as is_path
+from networkx.utils import arbitrary_element, not_implemented_for, py_random_state
+
+__all__ = [
+    "hamiltonian_path",
+    "is_reachable",
+    "is_strongly_connected",
+    "is_tournament",
+    "random_tournament",
+    "score_sequence",
+]
+
+
+def index_satisfying(iterable, condition):
+    """Returns the index of the first element in `iterable` that
+    satisfies the given condition.
+
+    If no such element is found (that is, when the iterable is
+    exhausted), this returns the length of the iterable (that is, one
+    greater than the last index of the iterable).
+
+    `iterable` must not be empty. If `iterable` is empty, this
+    function raises :exc:`ValueError`.
+
+    """
+    # Pre-condition: iterable must not be empty.
+    for i, x in enumerate(iterable):
+        if condition(x):
+            return i
+    # If we reach the end of the iterable without finding an element
+    # that satisfies the condition, return the length of the iterable,
+    # which is one greater than the index of its last element. If the
+    # iterable was empty, `i` will not be defined, so we raise an
+    # exception.
+    try:
+        return i + 1
+    except NameError as err:
+        raise ValueError("iterable must be non-empty") from err
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_tournament(G):
+    """Returns True if and only if `G` is a tournament.
+
+    A tournament is a directed graph, with neither self-loops nor
+    multi-edges, in which there is exactly one directed edge joining
+    each pair of distinct nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    bool
+        Whether the given graph is a tournament graph.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    >>> nx.is_tournament(G)
+    True
+
+    Notes
+    -----
+    Some definitions require a self-loop on each node, but that is not
+    the convention used here.
+
+    """
+    # In a tournament, there is exactly one directed edge joining each pair.
+    return (
+        all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2))
+        and nx.number_of_selfloops(G) == 0
+    )
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def hamiltonian_path(G):
+    """Returns a Hamiltonian path in the given tournament graph.
+
+    Each tournament has a Hamiltonian path. If furthermore, the
+    tournament is strongly connected, then the returned Hamiltonian path
+    is a Hamiltonian cycle (by joining the endpoints of the path).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    path : list
+        A list of nodes which form a Hamiltonian path in `G`.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.hamiltonian_path(G)
+    [0, 1, 2, 3]
+
+    Notes
+    -----
+    This is a recursive implementation with an asymptotic running time
+    of $O(n^2)$, ignoring multiplicative polylogarithmic factors, where
+    $n$ is the number of nodes in the graph.
+
+    """
+    if len(G) == 0:
+        return []
+    if len(G) == 1:
+        return [arbitrary_element(G)]
+    v = arbitrary_element(G)
+    hampath = hamiltonian_path(G.subgraph(set(G) - {v}))
+    # Get the index of the first node in the path that does *not* have
+    # an edge to `v`, then insert `v` before that node.
+    index = index_satisfying(hampath, lambda u: v not in G[u])
+    hampath.insert(index, v)
+    return hampath
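Editor's aside: a quick sketch of the recursive insertion step above in use, not part of the file being added; the explicit tournament is the one from the docstring:

    import networkx as nx

    G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
    assert nx.is_tournament(G)
    path = nx.tournament.hamiltonian_path(G)
    # the returned path visits every node, and each consecutive pair is an edge
    assert len(path) == len(G)
    assert all(G.has_edge(u, v) for u, v in zip(path, path[1:]))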
+
+
+@py_random_state(1)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_tournament(n, seed=None):
+    r"""Returns a random tournament graph on `n` nodes.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the returned graph.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    G : DiGraph
+        A tournament on `n` nodes, with exactly one directed edge joining
+        each pair of distinct nodes.
+
+    Notes
+    -----
+    This algorithm adds, for each pair of distinct nodes, an edge with
+    uniformly random orientation. In other words, `\binom{n}{2}` flips
+    of an unbiased coin decide the orientations of the edges in the
+    graph.
+
+    """
+    # Flip an unbiased coin for each pair of distinct nodes.
+    coins = (seed.random() for i in range((n * (n - 1)) // 2))
+    pairs = combinations(range(n), 2)
+    edges = ((u, v) if r < 0.5 else (v, u) for (u, v), r in zip(pairs, coins))
+    return nx.DiGraph(edges)
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def score_sequence(G):
+    """Returns the score sequence for the given tournament graph.
+
+    The score sequence is the sorted list of the out-degrees of the
+    nodes of the graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    list
+        A sorted list of the out-degrees of the nodes of `G`.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 0), (1, 3), (0, 2), (0, 3), (2, 1), (3, 2)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.score_sequence(G)
+    [1, 1, 2, 2]
+
+    """
+    return sorted(d for v, d in G.out_degree())
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
+def tournament_matrix(G):
+    r"""Returns the tournament matrix for the given tournament graph.
+
+    This function requires SciPy.
+
+    The *tournament matrix* of a tournament graph with edge set *E* is
+    the matrix *T* defined by
+
+    .. math::
+
+       T_{i j} =
+       \begin{cases}
+       +1 & \text{if } (i, j) \in E \\
+       -1 & \text{if } (j, i) \in E \\
+       0 & \text{if } i == j.
+       \end{cases}
+
+    An equivalent definition is `T = A - A^T`, where *A* is the
+    adjacency matrix of the graph `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    SciPy sparse array
+        The tournament matrix of the tournament graph `G`.
+
+    Raises
+    ------
+    ImportError
+        If SciPy is not available.
+
+    """
+    A = nx.adjacency_matrix(G)
+    return A - A.T
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_reachable(G, s, t):
+    """Decides whether there is a path from `s` to `t` in the
+    tournament.
+
+    This function is more theoretically efficient than the reachability
+    checks than the shortest path algorithms in
+    :mod:`networkx.algorithms.shortest_paths`.
+
+    The given graph **must** be a tournament, otherwise this function's
+    behavior is undefined.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    s : node
+        A node in the graph.
+
+    t : node
+        A node in the graph.
+
+    Returns
+    -------
+    bool
+        Whether there is a path from `s` to `t` in `G`.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 0), (1, 3), (1, 2), (2, 3), (2, 0), (3, 0)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.is_reachable(G, 1, 3)
+    True
+    >>> nx.tournament.is_reachable(G, 3, 2)
+    False
+
+    Notes
+    -----
+    Although this function is more theoretically efficient than the
+    generic shortest path functions, a speedup requires the use of
+    parallelism. Though it may in the future, the current implementation
+    does not use parallelism, thus you may not see much of a speedup.
+
+    This algorithm comes from [1].
+
+    References
+    ----------
+    .. [1] Tantau, Till.
+           "A note on the complexity of the reachability problem for
+           tournaments."
+           *Electronic Colloquium on Computational Complexity*. 2001.
+           <http://eccc.hpi-web.de/report/2001/092/>
+    """
+
+    def two_neighborhood(G, v):
+        """Returns the set of nodes at distance at most two from `v`.
+
+        `G` must be a graph and `v` a node in that graph.
+
+        The returned set includes the nodes at distance zero (that is,
+        the node `v` itself), the nodes at distance one (that is, the
+        out-neighbors of `v`), and the nodes at distance two.
+
+        """
+        # TODO This is trivially parallelizable.
+        return {
+            x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G)
+        }
+
+    def is_closed(G, nodes):
+        """Decides whether the given set of nodes is closed.
+
+        A set *S* of nodes is *closed* if for each node *u* in the graph
+        not in *S* and for each node *v* in *S*, there is an edge from
+        *u* to *v*.
+
+        """
+        # TODO This is trivially parallelizable.
+        return all(v in G[u] for u in set(G) - nodes for v in nodes)
+
+    # TODO This is trivially parallelizable.
+    neighborhoods = [two_neighborhood(G, v) for v in G]
+    return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods)
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(name="tournament_is_strongly_connected")
+def is_strongly_connected(G):
+    """Decides whether the given tournament is strongly connected.
+
+    This function is more theoretically efficient than the
+    :func:`~networkx.algorithms.components.is_strongly_connected`
+    function.
+
+    The given graph **must** be a tournament, otherwise this function's
+    behavior is undefined.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    bool
+        Whether the tournament is strongly connected.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.is_strongly_connected(G)
+    True
+    >>> G.remove_edge(3, 0)
+    >>> G.add_edge(0, 3)
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.is_strongly_connected(G)
+    False
+
+    Notes
+    -----
+    Although this function is more theoretically efficient than the
+    generic strong connectivity function, a speedup requires the use of
+    parallelism. Though it may in the future, the current implementation
+    does not use parallelism, thus you may not see much of a speedup.
+
+    This algorithm comes from [1].
+
+    References
+    ----------
+    .. [1] Tantau, Till.
+           "A note on the complexity of the reachability problem for
+           tournaments."
+           *Electronic Colloquium on Computational Complexity*. 2001.
+           <http://eccc.hpi-web.de/report/2001/092/>
+
+    """
+    # TODO This is trivially parallelizable.
+    return all(is_reachable(G, u, v) for u in G for v in G)
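Editor's aside: a short end-to-end sketch of the tournament helpers above, not part of the file being added; the node count and seed are arbitrary:

    import networkx as nx

    T = nx.tournament.random_tournament(6, seed=1)
    assert nx.is_tournament(T)
    print(nx.tournament.score_sequence(T))  # sorted out-degrees; they sum to C(6, 2) = 15
    if nx.tournament.is_strongly_connected(T):
        # in a strongly connected tournament every node reaches every other node
        assert all(nx.tournament.is_reachable(T, u, v) for u in T for v in T)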
venv/lib/python3.10/site-packages/networkx/algorithms/voronoi.py
ADDED
@@ -0,0 +1,85 @@
+"""Functions for computing the Voronoi cells of a graph."""
+import networkx as nx
+from networkx.utils import groups
+
+__all__ = ["voronoi_cells"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def voronoi_cells(G, center_nodes, weight="weight"):
+    """Returns the Voronoi cells centered at `center_nodes` with respect
+    to the shortest-path distance metric.
+
+    If $C$ is a set of nodes in the graph and $c$ is an element of $C$,
+    the *Voronoi cell* centered at a node $c$ is the set of all nodes
+    $v$ that are closer to $c$ than to any other center node in $C$ with
+    respect to the shortest-path distance metric. [1]_
+
+    For directed graphs, this will compute the "outward" Voronoi cells,
+    as defined in [1]_, in which distance is measured from the center
+    nodes to the target node. For the "inward" Voronoi cells, use the
+    :meth:`DiGraph.reverse` method to reverse the orientation of the
+    edges before invoking this function on the directed graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    center_nodes : set
+        A nonempty set of nodes in the graph `G` that represent the
+        center of the Voronoi cells.
+
+    weight : string or function
+        The edge attribute (or an arbitrary function) representing the
+        weight of an edge. This keyword argument is as described in the
+        documentation for :func:`~networkx.multi_source_dijkstra_path`,
+        for example.
+
+    Returns
+    -------
+    dictionary
+        A mapping from center node to set of all nodes in the graph
+        closer to that center node than to any other center node. The
+        keys of the dictionary are the elements of `center_nodes`, and
+        the values of the dictionary form a partition of the nodes of
+        `G`.
+
+    Examples
+    --------
+    To get only the partition of the graph induced by the Voronoi cells,
+    take the collection of all values in the returned dictionary::
+
+        >>> G = nx.path_graph(6)
+        >>> center_nodes = {0, 3}
+        >>> cells = nx.voronoi_cells(G, center_nodes)
+        >>> partition = set(map(frozenset, cells.values()))
+        >>> sorted(map(sorted, partition))
+        [[0, 1], [2, 3, 4, 5]]
+
+    Raises
+    ------
+    ValueError
+        If `center_nodes` is empty.
+
+    References
+    ----------
+    .. [1] Erwig, Martin. (2000), "The graph Voronoi diagram with applications."
+           *Networks*, 36: 156--163.
+           https://doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L
+
+    """
+    # Determine the shortest paths from any one of the center nodes to
+    # every node in the graph.
+    #
+    # This raises `ValueError` if `center_nodes` is an empty set.
+    paths = nx.multi_source_dijkstra_path(G, center_nodes, weight=weight)
+    # Determine the center node from which the shortest path originates.
+    nearest = {v: p[0] for v, p in paths.items()}
+    # Get the mapping from center node to all nodes closer to it than to
+    # any other center node.
+    cells = groups(nearest)
+    # We collect all unreachable nodes under a special key, if there are any.
+    unreachable = set(G) - set(nearest)
+    if unreachable:
+        cells["unreachable"] = unreachable
+    return cells
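A small usage sketch (not part of the diff) for ``voronoi_cells``, exercising the weighted shortest-path distances it takes from ``multi_source_dijkstra_path`` and the special ``"unreachable"`` key; the toy graph and weights below are chosen only for illustration:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 5.0), (2, 3, 1.0)])
>>> cells = nx.voronoi_cells(G, {0, 3})
>>> sorted(cells[0]), sorted(cells[3])
([0, 1], [2, 3])
>>> G.add_node(9)  # an isolated node is collected under the "unreachable" key
>>> nx.voronoi_cells(G, {0, 3})["unreachable"]
{9}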
venv/lib/python3.10/site-packages/networkx/algorithms/wiener.py
ADDED
@@ -0,0 +1,226 @@
+"""Functions related to the Wiener Index of a graph.
+
+The Wiener Index is a topological measure of a graph
+related to the distance between nodes and their degree.
+The Schultz Index and Gutman Index are similar measures.
+They are used to categorize molecules via the network of
+atoms connected by chemical bonds. The indices are
+correlated with functional aspects of the molecules.
+
+References
+----------
+.. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
+.. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
+       Croatica Chemica Acta, 71 (1998), 21-51.
+       https://hrcak.srce.hr/132323
+"""
+
+import itertools as it
+
+import networkx as nx
+
+__all__ = ["wiener_index", "schultz_index", "gutman_index"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def wiener_index(G, weight=None):
+    """Returns the Wiener index of the given graph.
+
+    The *Wiener index* of a graph is the sum of the shortest-path
+    (weighted) distances between each pair of reachable nodes.
+    For pairs of nodes in undirected graphs, only one orientation
+    of the pair is counted.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used in computing shortest-path distances.
+
+    Returns
+    -------
+    number
+        The Wiener index of the graph `G`.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph `G` is not connected.
+
+    Notes
+    -----
+    If a pair of nodes is not reachable, the distance is assumed to be
+    infinity. This means that for graphs that are not
+    strongly-connected, this function returns ``inf``.
+
+    The Wiener index is not usually defined for directed graphs, however
+    this function uses the natural generalization of the Wiener index to
+    directed graphs.
+
+    Examples
+    --------
+    The Wiener index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes, since each pair of
+    nodes is at distance one::
+
+        >>> n = 10
+        >>> G = nx.complete_graph(n)
+        >>> nx.wiener_index(G) == n * (n - 1) / 2
+        True
+
+    Graphs that are not strongly-connected have infinite Wiener index::
+
+        >>> G = nx.empty_graph(2)
+        >>> nx.wiener_index(G)
+        inf
+
+    References
+    ----------
+    .. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
+    """
+    connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G)
+    if not connected:
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    total = sum(it.chain.from_iterable(nbrs.values() for node, nbrs in spl))
+    # Need to account for double counting pairs of nodes in undirected graphs.
+    return total if G.is_directed() else total / 2
+
+
+@nx.utils.not_implemented_for("directed")
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def schultz_index(G, weight=None):
+    r"""Returns the Schultz Index (of the first kind) of `G`.
+
+    The *Schultz Index* [3]_ of a graph is the sum over all node pairs of
+    distances times the sum of degrees. Consider an undirected graph `G`.
+    For each node pair ``(u, v)`` compute ``dist(u, v) * (deg(u) + deg(v))``,
+    where ``dist`` is the shortest path length between two nodes and ``deg``
+    is the degree of a node.
+
+    The Schultz Index is the sum of these quantities over all (unordered)
+    pairs of nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The undirected graph of interest.
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used in computing shortest-path distances.
+
+    Returns
+    -------
+    number
+        The first kind of Schultz Index of the graph `G`.
+
+    Examples
+    --------
+    The Schultz Index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes times ``2 * (n - 1)``,
+    since each pair of nodes is at distance one and the sum of degrees
+    of two nodes is ``2 * (n - 1)``.
+
+    >>> n = 10
+    >>> G = nx.complete_graph(n)
+    >>> nx.schultz_index(G) == (n * (n - 1) / 2) * (2 * (n - 1))
+    True
+
+    A disconnected graph has infinite Schultz Index:
+
+    >>> nx.schultz_index(nx.empty_graph(2))
+    inf
+
+    References
+    ----------
+    .. [1] I. Gutman, Selected properties of the Schultz molecular topological index,
+           J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
+           https://doi.org/10.1021/ci00021a009
+    .. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
+           Croatica Chemica Acta, 71 (1998), 21-51.
+           https://hrcak.srce.hr/132323
+    .. [3] H. P. Schultz, Topological organic chemistry. 1.
+           Graph theory and topological indices of alkanes,
+           J. Chem. Inf. Comput. Sci. 29 (1989), 239–257.
+
+    """
+    if not nx.is_connected(G):
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    d = dict(G.degree, weight=weight)
+    return sum(dist * (d[u] + d[v]) for u, info in spl for v, dist in info.items()) / 2
+
+
+@nx.utils.not_implemented_for("directed")
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def gutman_index(G, weight=None):
+    r"""Returns the Gutman Index for the graph `G`.
+
+    The *Gutman Index* measures the topology of networks, especially for molecule
+    networks of atoms connected by bonds [1]_. It is also called the Schultz Index
+    of the second kind [2]_.
+
+    Consider an undirected graph `G` with node set ``V``.
+    The Gutman Index of a graph is the sum over all (unordered) pairs
+    of nodes ``(u, v)``, with distance ``dist(u, v)`` and degrees ``deg(u)``
+    and ``deg(v)``, of ``dist(u, v) * deg(u) * deg(v)``.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used in computing shortest-path distances.
+
+    Returns
+    -------
+    number
+        The Gutman Index of the graph `G`.
+
+    Examples
+    --------
+    The Gutman Index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes times ``(n - 1) * (n - 1)``,
+    since each pair of nodes is at distance one and the product of degrees of two
+    vertices is ``(n - 1) * (n - 1)``.
+
+    >>> n = 10
+    >>> G = nx.complete_graph(n)
+    >>> nx.gutman_index(G) == (n * (n - 1) / 2) * ((n - 1) * (n - 1))
+    True
+
+    A disconnected graph has infinite Gutman Index:
+
+    >>> G = nx.empty_graph(2)
+    >>> nx.gutman_index(G)
+    inf
+
+    References
+    ----------
+    .. [1] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
+           Croatica Chemica Acta, 71 (1998), 21-51.
+           https://hrcak.srce.hr/132323
+    .. [2] I. Gutman, Selected properties of the Schultz molecular topological index,
+           J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
+           https://doi.org/10.1021/ci00021a009
+
+    """
+    if not nx.is_connected(G):
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    d = dict(G.degree, weight=weight)
+    return sum(dist * d[u] * d[v] for u, vinfo in spl for v, dist in vinfo.items()) / 2
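A doctest-style cross-check (not part of the diff) of the three indices on the 3-node path graph, where the pairwise terms are small enough to verify by hand; it assumes the top-level ``nx.wiener_index``, ``nx.schultz_index`` and ``nx.gutman_index`` aliases exported via this module's ``__all__``:

>>> import networkx as nx
>>> P3 = nx.path_graph(3)  # 0 - 1 - 2, degrees (1, 2, 1)
>>> nx.wiener_index(P3)  # 1 + 1 + 2
4.0
>>> nx.schultz_index(P3)  # 1*(1+2) + 1*(2+1) + 2*(1+1)
10.0
>>> nx.gutman_index(P3)  # 1*1*2 + 1*2*1 + 2*1*1
6.0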
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-310.pyc
ADDED
Binary file (4.98 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-310.pyc
ADDED
Binary file (13.4 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-310.pyc
ADDED
Binary file (13.2 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-310.pyc
ADDED
Binary file (5.04 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-310.pyc
ADDED
Binary file (31.8 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-310.pyc
ADDED
Binary file (707 Bytes)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-310.pyc
ADDED
Binary file (13.5 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-310.pyc
ADDED
Binary file (41.1 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-310.pyc
ADDED
Binary file (5.18 kB)
venv/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-310.pyc
ADDED
Binary file (12.8 kB)
venv/lib/python3.10/site-packages/networkx/utils/__pycache__/configs.cpython-310.pyc
ADDED
Binary file (9.98 kB)