Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__init__.py +25 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/asyn_fluid.py +151 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/centrality.py +171 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/community_utils.py +29 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/divisive.py +196 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/kclique.py +79 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/kernighan_lin.py +139 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py +337 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/lukes.py +227 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/modularity_max.py +451 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/quality.py +346 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py +136 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_centrality.py +84 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_divisive.py +106 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kclique.py +91 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py +91 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_label_propagation.py +241 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_louvain.py +264 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_lukes.py +152 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_modularity_max.py +340 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_quality.py +138 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_utils.py +28 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 +3 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 +3 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 +3 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/__init__.py +4 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/all.py +321 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/binary.py +448 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/product.py +630 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_all.py +328 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_binary.py +471 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_product.py +491 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_unary.py +55 -0
- env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/unary.py +76 -0
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__init__.py
ADDED
@@ -0,0 +1,25 @@
"""Functions for computing and measuring community structure.

The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the
functions as attributes of ``community``. For example::

    >>> import networkx as nx
    >>> G = nx.barbell_graph(5, 1)
    >>> communities_generator = nx.community.girvan_newman(G)
    >>> top_level_communities = next(communities_generator)
    >>> next_level_communities = next(communities_generator)
    >>> sorted(map(sorted, next_level_communities))
    [[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]]

"""
from networkx.algorithms.community.asyn_fluid import *
from networkx.algorithms.community.centrality import *
from networkx.algorithms.community.divisive import *
from networkx.algorithms.community.kclique import *
from networkx.algorithms.community.kernighan_lin import *
from networkx.algorithms.community.label_propagation import *
from networkx.algorithms.community.lukes import *
from networkx.algorithms.community.modularity_max import *
from networkx.algorithms.community.quality import *
from networkx.algorithms.community.community_utils import *
from networkx.algorithms.community.louvain import *
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.36 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-310.pyc
ADDED
Binary file (4.35 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-310.pyc
ADDED
Binary file (6.3 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-310.pyc
ADDED
Binary file (1.29 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-310.pyc
ADDED
Binary file (5.62 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-310.pyc
ADDED
Binary file (2.42 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-310.pyc
ADDED
Binary file (5.43 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-310.pyc
ADDED
Binary file (10.5 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-310.pyc
ADDED
Binary file (13.8 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-310.pyc
ADDED
Binary file (7.27 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-310.pyc
ADDED
Binary file (13.3 kB)

env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-310.pyc
ADDED
Binary file (11.4 kB)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/asyn_fluid.py
ADDED
@@ -0,0 +1,151 @@
"""Asynchronous Fluid Communities algorithm for community detection."""

from collections import Counter

import networkx as nx
from networkx.algorithms.components import is_connected
from networkx.exception import NetworkXError
from networkx.utils import groups, not_implemented_for, py_random_state

__all__ = ["asyn_fluidc"]


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable
def asyn_fluidc(G, k, max_iter=100, seed=None):
    """Returns communities in `G` as detected by Fluid Communities algorithm.

    The asynchronous fluid communities algorithm is described in
    [1]_. The algorithm is based on the simple idea of fluids interacting
    in an environment, expanding and pushing each other. Its initialization is
    random, so found communities may vary on different executions.

    The algorithm proceeds as follows. First each of the initial k communities
    is initialized in a random vertex in the graph. Then the algorithm iterates
    over all vertices in a random order, updating the community of each vertex
    based on its own community and the communities of its neighbors. This
    process is performed several times until convergence.
    At all times, each community has a total density of 1, which is equally
    distributed among the vertices it contains. If a vertex changes of
    community, vertex densities of affected communities are adjusted
    immediately. When a complete iteration over all vertices is done, such that
    no vertex changes the community it belongs to, the algorithm has converged
    and returns.

    This is the original version of the algorithm described in [1]_.
    Unfortunately, it does not support weighted graphs yet.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be simple and undirected.

    k : integer
        The number of communities to be found.

    max_iter : integer
        The number of maximum iterations allowed. By default 100.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    k variable is not an optional argument.

    References
    ----------
    .. [1] Parés F., Garcia-Gasulla D. et al. "Fluid Communities: A
       Competitive and Highly Scalable Community Detection Algorithm".
       [https://arxiv.org/pdf/1703.09307.pdf].
    """
    # Initial checks
    if not isinstance(k, int):
        raise NetworkXError("k must be an integer.")
    if not k > 0:
        raise NetworkXError("k must be greater than 0.")
    if not is_connected(G):
        raise NetworkXError("Fluid Communities require connected Graphs.")
    if len(G) < k:
        raise NetworkXError("k cannot be bigger than the number of nodes.")
    # Initialization
    max_density = 1.0
    vertices = list(G)
    seed.shuffle(vertices)
    communities = {n: i for i, n in enumerate(vertices[:k])}
    density = {}
    com_to_numvertices = {}
    for vertex in communities:
        com_to_numvertices[communities[vertex]] = 1
        density[communities[vertex]] = max_density
    # Set up control variables and start iterating
    iter_count = 0
    cont = True
    while cont:
        cont = False
        iter_count += 1
        # Loop over all vertices in graph in a random order
        vertices = list(G)
        seed.shuffle(vertices)
        for vertex in vertices:
            # Updating rule
            com_counter = Counter()
            # Take into account self vertex community
            try:
                com_counter.update({communities[vertex]: density[communities[vertex]]})
            except KeyError:
                pass
            # Gather neighbor vertex communities
            for v in G[vertex]:
                try:
                    com_counter.update({communities[v]: density[communities[v]]})
                except KeyError:
                    continue
            # Check which is the community with highest density
            new_com = -1
            if len(com_counter.keys()) > 0:
                max_freq = max(com_counter.values())
                best_communities = [
                    com
                    for com, freq in com_counter.items()
                    if (max_freq - freq) < 0.0001
                ]
                # If actual vertex com in best communities, it is preserved
                try:
                    if communities[vertex] in best_communities:
                        new_com = communities[vertex]
                except KeyError:
                    pass
                # If vertex community changes...
                if new_com == -1:
                    # Set flag of non-convergence
                    cont = True
                    # Randomly chose a new community from candidates
                    new_com = seed.choice(best_communities)
                    # Update previous community status
                    try:
                        com_to_numvertices[communities[vertex]] -= 1
                        density[communities[vertex]] = (
                            max_density / com_to_numvertices[communities[vertex]]
                        )
                    except KeyError:
                        pass
                    # Update new community status
                    communities[vertex] = new_com
                    com_to_numvertices[communities[vertex]] += 1
                    density[communities[vertex]] = (
                        max_density / com_to_numvertices[communities[vertex]]
                    )
        # If maximum iterations reached --> output actual results
        if iter_count > max_iter:
            break
    # Return results by grouping communities as list of vertices
    return iter(groups(communities).values())
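For orientation, a minimal usage sketch of `asyn_fluidc` as added above (illustrative only, not part of the packaged file); the karate-club example graph is connected and undirected, so it satisfies the function's requirements, and the seed value is arbitrary:

    import networkx as nx

    # Detect two fluid communities; the result is an iterator of node sets.
    G = nx.karate_club_graph()
    communities = list(nx.community.asyn_fluidc(G, k=2, seed=42))
    print([sorted(c) for c in communities])  # two sets that together cover all 34 nodes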
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/centrality.py
ADDED
@@ -0,0 +1,171 @@
"""Functions for computing communities based on centrality notions."""

import networkx as nx

__all__ = ["girvan_newman"]


@nx._dispatchable(preserve_edge_attrs="most_valuable_edge")
def girvan_newman(G, most_valuable_edge=None):
    """Finds communities in a graph using the Girvan–Newman method.

    Parameters
    ----------
    G : NetworkX graph

    most_valuable_edge : function
        Function that takes a graph as input and outputs an edge. The
        edge returned by this function will be recomputed and removed at
        each iteration of the algorithm.

        If not specified, the edge with the highest
        :func:`networkx.edge_betweenness_centrality` will be used.

    Returns
    -------
    iterator
        Iterator over tuples of sets of nodes in `G`. Each set of node
        is a community, each tuple is a sequence of communities at a
        particular level of the algorithm.

    Examples
    --------
    To get the first pair of communities::

        >>> G = nx.path_graph(10)
        >>> comp = nx.community.girvan_newman(G)
        >>> tuple(sorted(c) for c in next(comp))
        ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])

    To get only the first *k* tuples of communities, use
    :func:`itertools.islice`::

        >>> import itertools
        >>> G = nx.path_graph(8)
        >>> k = 2
        >>> comp = nx.community.girvan_newman(G)
        >>> for communities in itertools.islice(comp, k):
        ...     print(tuple(sorted(c) for c in communities))
        ...
        ([0, 1, 2, 3], [4, 5, 6, 7])
        ([0, 1], [2, 3], [4, 5, 6, 7])

    To stop getting tuples of communities once the number of communities
    is greater than *k*, use :func:`itertools.takewhile`::

        >>> import itertools
        >>> G = nx.path_graph(8)
        >>> k = 4
        >>> comp = nx.community.girvan_newman(G)
        >>> limited = itertools.takewhile(lambda c: len(c) <= k, comp)
        >>> for communities in limited:
        ...     print(tuple(sorted(c) for c in communities))
        ...
        ([0, 1, 2, 3], [4, 5, 6, 7])
        ([0, 1], [2, 3], [4, 5, 6, 7])
        ([0, 1], [2, 3], [4, 5], [6, 7])

    To just choose an edge to remove based on the weight::

        >>> from operator import itemgetter
        >>> G = nx.path_graph(10)
        >>> edges = G.edges()
        >>> nx.set_edge_attributes(G, {(u, v): v for u, v in edges}, "weight")
        >>> def heaviest(G):
        ...     u, v, w = max(G.edges(data="weight"), key=itemgetter(2))
        ...     return (u, v)
        ...
        >>> comp = nx.community.girvan_newman(G, most_valuable_edge=heaviest)
        >>> tuple(sorted(c) for c in next(comp))
        ([0, 1, 2, 3, 4, 5, 6, 7, 8], [9])

    To utilize edge weights when choosing an edge with, for example, the
    highest betweenness centrality::

        >>> from networkx import edge_betweenness_centrality as betweenness
        >>> def most_central_edge(G):
        ...     centrality = betweenness(G, weight="weight")
        ...     return max(centrality, key=centrality.get)
        ...
        >>> G = nx.path_graph(10)
        >>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge)
        >>> tuple(sorted(c) for c in next(comp))
        ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])

    To specify a different ranking algorithm for edges, use the
    `most_valuable_edge` keyword argument::

        >>> from networkx import edge_betweenness_centrality
        >>> from random import random
        >>> def most_central_edge(G):
        ...     centrality = edge_betweenness_centrality(G)
        ...     max_cent = max(centrality.values())
        ...     # Scale the centrality values so they are between 0 and 1,
        ...     # and add some random noise.
        ...     centrality = {e: c / max_cent for e, c in centrality.items()}
        ...     # Add some random noise.
        ...     centrality = {e: c + random() for e, c in centrality.items()}
        ...     return max(centrality, key=centrality.get)
        ...
        >>> G = nx.path_graph(10)
        >>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge)

    Notes
    -----
    The Girvan–Newman algorithm detects communities by progressively
    removing edges from the original graph. The algorithm removes the
    "most valuable" edge, traditionally the edge with the highest
    betweenness centrality, at each step. As the graph breaks down into
    pieces, the tightly knit community structure is exposed and the
    result can be depicted as a dendrogram.

    """
    # If the graph is already empty, simply return its connected
    # components.
    if G.number_of_edges() == 0:
        yield tuple(nx.connected_components(G))
        return
    # If no function is provided for computing the most valuable edge,
    # use the edge betweenness centrality.
    if most_valuable_edge is None:

        def most_valuable_edge(G):
            """Returns the edge with the highest betweenness centrality
            in the graph `G`.

            """
            # We have guaranteed that the graph is non-empty, so this
            # dictionary will never be empty.
            betweenness = nx.edge_betweenness_centrality(G)
            return max(betweenness, key=betweenness.get)

    # The copy of G here must include the edge weight data.
    g = G.copy().to_undirected()
    # Self-loops must be removed because their removal has no effect on
    # the connected components of the graph.
    g.remove_edges_from(nx.selfloop_edges(g))
    while g.number_of_edges() > 0:
        yield _without_most_central_edges(g, most_valuable_edge)


def _without_most_central_edges(G, most_valuable_edge):
    """Returns the connected components of the graph that results from
    repeatedly removing the most "valuable" edge in the graph.

    `G` must be a non-empty graph. This function modifies the graph `G`
    in-place; that is, it removes edges on the graph `G`.

    `most_valuable_edge` is a function that takes the graph `G` as input
    (or a subgraph with one or more edges of `G` removed) and returns an
    edge. That edge will be removed and this process will be repeated
    until the number of connected components in the graph increases.

    """
    original_num_components = nx.number_connected_components(G)
    num_new_components = original_num_components
    while num_new_components <= original_num_components:
        edge = most_valuable_edge(G)
        G.remove_edge(*edge)
        new_components = tuple(nx.connected_components(G))
        num_new_components = len(new_components)
    return new_components
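A common pattern with the generator above is to scan the successive levels and keep the partition with the highest modularity, using `modularity` from the `quality.py` module that is also added in this diff. An illustrative sketch, not part of the packaged file:

    import networkx as nx

    # Girvan-Newman yields one partition per dendrogram level; pick the level
    # that maximizes modularity.
    G = nx.karate_club_graph()
    best = max(
        nx.community.girvan_newman(G),
        key=lambda partition: nx.community.modularity(G, partition),
    )
    print(len(best), [sorted(c) for c in best])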
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/community_utils.py
ADDED
@@ -0,0 +1,29 @@
"""Helper functions for community-finding algorithms."""
import networkx as nx

__all__ = ["is_partition"]


@nx._dispatchable
def is_partition(G, communities):
    """Returns *True* if `communities` is a partition of the nodes of `G`.

    A partition of a universe set is a family of pairwise disjoint sets
    whose union is the entire universe set.

    Parameters
    ----------
    G : NetworkX graph.

    communities : list or iterable of sets of nodes
        If not a list, the iterable is converted internally to a list.
        If it is an iterator it is exhausted.

    """
    # Alternate implementation:
    # return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G)
    if not isinstance(communities, list):
        communities = list(communities)
    nodes = {n for c in communities for n in c if n in G}

    return len(G) == len(nodes) == sum(len(c) for c in communities)
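A quick illustrative check of `is_partition` on a small graph (not part of the packaged file):

    import networkx as nx

    G = nx.path_graph(4)  # nodes 0..3
    print(nx.community.is_partition(G, [{0, 1}, {2, 3}]))     # True: disjoint sets covering all nodes
    print(nx.community.is_partition(G, [{0, 1}, {1, 2, 3}]))  # False: node 1 appears in two sets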
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/divisive.py
ADDED
@@ -0,0 +1,196 @@
import functools

import networkx as nx

__all__ = [
    "edge_betweenness_partition",
    "edge_current_flow_betweenness_partition",
]


@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_partition(G, number_of_sets, *, weight=None):
    """Partition created by iteratively removing the highest edge betweenness edge.

    This algorithm works by calculating the edge betweenness for all
    edges and removing the edge with the highest value. It is then
    determined whether the graph has been broken into at least
    `number_of_sets` connected components.
    If not the process is repeated.

    Parameters
    ----------
    G : NetworkX Graph, DiGraph or MultiGraph
        Graph to be partitioned

    number_of_sets : int
        Number of sets in the desired partition of the graph

    weight : key, optional, default=None
        The key to use if using weights for edge betweenness calculation

    Returns
    -------
    C : list of sets
        Partition of the nodes of G

    Raises
    ------
    NetworkXError
        If number_of_sets is <= 0 or if number_of_sets > len(G)

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> part = nx.community.edge_betweenness_partition(G, 2)
    >>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part
    True
    >>> {2, 8, 9, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part
    True

    See Also
    --------
    edge_current_flow_betweenness_partition

    Notes
    -----
    This algorithm is fairly slow, as both the calculation of connected
    components and edge betweenness relies on all pairs shortest
    path algorithms. They could potentially be combined to cut down
    on overall computation time.

    References
    ----------
    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
       Volume 486, Issue 3-5 p. 75-174
       http://arxiv.org/abs/0906.0612
    """
    if number_of_sets <= 0:
        raise nx.NetworkXError("number_of_sets must be >0")
    if number_of_sets == 1:
        return [set(G)]
    if number_of_sets == len(G):
        return [{n} for n in G]
    if number_of_sets > len(G):
        raise nx.NetworkXError("number_of_sets must be <= len(G)")

    H = G.copy()
    partition = list(nx.connected_components(H))
    while len(partition) < number_of_sets:
        ranking = nx.edge_betweenness_centrality(H, weight=weight)
        edge = max(ranking, key=ranking.get)
        H.remove_edge(*edge)
        partition = list(nx.connected_components(H))
    return partition


@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None):
    """Partition created by removing the highest edge current flow betweenness edge.

    This algorithm works by calculating the edge current flow
    betweenness for all edges and removing the edge with the
    highest value. It is then determined whether the graph has
    been broken into at least `number_of_sets` connected
    components. If not the process is repeated.

    Parameters
    ----------
    G : NetworkX Graph, DiGraph or MultiGraph
        Graph to be partitioned

    number_of_sets : int
        Number of sets in the desired partition of the graph

    weight : key, optional (default=None)
        The edge attribute key to use as weights for
        edge current flow betweenness calculations

    Returns
    -------
    C : list of sets
        Partition of G

    Raises
    ------
    NetworkXError
        If number_of_sets is <= 0 or number_of_sets > len(G)

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2)
    >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part
    True
    >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part
    True


    See Also
    --------
    edge_betweenness_partition

    Notes
    -----
    This algorithm is extremely slow, as the recalculation of the edge
    current flow betweenness is extremely slow.

    References
    ----------
    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
       Volume 486, Issue 3-5 p. 75-174
       http://arxiv.org/abs/0906.0612
    """
    if number_of_sets <= 0:
        raise nx.NetworkXError("number_of_sets must be >0")
    elif number_of_sets == 1:
        return [set(G)]
    elif number_of_sets == len(G):
        return [{n} for n in G]
    elif number_of_sets > len(G):
        raise nx.NetworkXError("number_of_sets must be <= len(G)")

    rank = functools.partial(
        nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight
    )

    # current flow requires a connected network so we track the components explicitly
    H = G.copy()
    partition = list(nx.connected_components(H))
    if len(partition) > 1:
        Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition]
    else:
        Hcc_subgraphs = [H]

    ranking = {}
    for Hcc in Hcc_subgraphs:
        ranking.update(rank(Hcc))

    while len(partition) < number_of_sets:
        edge = max(ranking, key=ranking.get)
        for cc, Hcc in zip(partition, Hcc_subgraphs):
            if edge[0] in cc:
                Hcc.remove_edge(*edge)
                del ranking[edge]
                splitcc_list = list(nx.connected_components(Hcc))
                if len(splitcc_list) > 1:
                    # there are 2 connected components. split off smaller one
                    cc_new = min(splitcc_list, key=len)
                    Hcc_new = Hcc.subgraph(cc_new).copy()
                    # update edge rankings for Hcc_new
                    newranks = rank(Hcc_new)
                    for e, r in newranks.items():
                        ranking[e if e in ranking else e[::-1]] = r
                    # append new cc and Hcc to their lists.
                    partition.append(cc_new)
                    Hcc_subgraphs.append(Hcc_new)

                    # leave existing cc and Hcc in their lists, but shrink them
                    Hcc.remove_nodes_from(cc_new)
                    cc.difference_update(cc_new)
                # update edge rankings for Hcc whether it was split or not
                newranks = rank(Hcc)
                for e, r in newranks.items():
                    ranking[e if e in ranking else e[::-1]] = r
                break
    return partition
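An illustrative sketch (not part of the packaged file) combining `edge_betweenness_partition` with `is_partition` from `community_utils.py` above; removing one edge increases the component count by at most one, so the loop stops at exactly the requested number of sets:

    import networkx as nx

    G = nx.karate_club_graph()
    parts = nx.community.edge_betweenness_partition(G, 4)
    print(len(parts), nx.community.is_partition(G, parts))  # 4 True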
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/kclique.py
ADDED
@@ -0,0 +1,79 @@
from collections import defaultdict

import networkx as nx

__all__ = ["k_clique_communities"]


@nx._dispatchable
def k_clique_communities(G, k, cliques=None):
    """Find k-clique communities in graph using the percolation method.

    A k-clique community is the union of all cliques of size k that
    can be reached through adjacent (sharing k-1 nodes) k-cliques.

    Parameters
    ----------
    G : NetworkX graph

    k : int
        Size of smallest clique

    cliques: list or generator
        Precomputed cliques (use networkx.find_cliques(G))

    Returns
    -------
    Yields sets of nodes, one for each k-clique community.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2)
    >>> G.add_edges_from(K5.edges())
    >>> c = list(nx.community.k_clique_communities(G, 4))
    >>> sorted(list(c[0]))
    [0, 1, 2, 3, 4, 5, 6]
    >>> list(nx.community.k_clique_communities(G, 6))
    []

    References
    ----------
    .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek,
       Uncovering the overlapping community structure of complex networks
       in nature and society Nature 435, 814-818, 2005,
       doi:10.1038/nature03607
    """
    if k < 2:
        raise nx.NetworkXError(f"k={k}, k must be greater than 1.")
    if cliques is None:
        cliques = nx.find_cliques(G)
    cliques = [frozenset(c) for c in cliques if len(c) >= k]

    # First index which nodes are in which cliques
    membership_dict = defaultdict(list)
    for clique in cliques:
        for node in clique:
            membership_dict[node].append(clique)

    # For each clique, see which adjacent cliques percolate
    perc_graph = nx.Graph()
    perc_graph.add_nodes_from(cliques)
    for clique in cliques:
        for adj_clique in _get_adjacent_cliques(clique, membership_dict):
            if len(clique.intersection(adj_clique)) >= (k - 1):
                perc_graph.add_edge(clique, adj_clique)

    # Connected components of clique graph with perc edges
    # are the percolated cliques
    for component in nx.connected_components(perc_graph):
        yield (frozenset.union(*component))


def _get_adjacent_cliques(clique, membership_dict):
    adjacent_cliques = set()
    for n in clique:
        for adj_clique in membership_dict[n]:
            if clique != adj_clique:
                adjacent_cliques.add(adj_clique)
    return adjacent_cliques
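An illustrative sketch (not part of the packaged file) of the overlapping nature of clique percolation: two 4-cliques that share only a single node do not percolate into one community, so the shared node belongs to both resulting communities:

    from itertools import combinations

    import networkx as nx

    G = nx.Graph()
    G.add_edges_from(combinations([0, 1, 2, 3], 2))  # first 4-clique
    G.add_edges_from(combinations([3, 4, 5, 6], 2))  # second 4-clique, sharing node 3
    comms = [set(c) for c in nx.community.k_clique_communities(G, 3)]
    print(comms)  # [{0, 1, 2, 3}, {3, 4, 5, 6}] in some order; node 3 is in both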
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/kernighan_lin.py
ADDED
@@ -0,0 +1,139 @@
"""Functions for computing the Kernighan–Lin bipartition algorithm."""

from itertools import count

import networkx as nx
from networkx.algorithms.community.community_utils import is_partition
from networkx.utils import BinaryHeap, not_implemented_for, py_random_state

__all__ = ["kernighan_lin_bisection"]


def _kernighan_lin_sweep(edges, side):
    """
    This is a modified form of Kernighan-Lin, which moves single nodes at a
    time, alternating between sides to keep the bisection balanced. We keep
    two min-heaps of swap costs to make optimal-next-move selection fast.
    """
    costs0, costs1 = costs = BinaryHeap(), BinaryHeap()
    for u, side_u, edges_u in zip(count(), side, edges):
        cost_u = sum(w if side[v] else -w for v, w in edges_u)
        costs[side_u].insert(u, cost_u if side_u else -cost_u)

    def _update_costs(costs_x, x):
        for y, w in edges[x]:
            costs_y = costs[side[y]]
            cost_y = costs_y.get(y)
            if cost_y is not None:
                cost_y += 2 * (-w if costs_x is costs_y else w)
                costs_y.insert(y, cost_y, True)

    i = 0
    totcost = 0
    while costs0 and costs1:
        u, cost_u = costs0.pop()
        _update_costs(costs0, u)
        v, cost_v = costs1.pop()
        _update_costs(costs1, v)
        totcost += cost_u + cost_v
        i += 1
        yield totcost, i, (u, v)


@not_implemented_for("directed")
@py_random_state(4)
@nx._dispatchable(edge_attrs="weight")
def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None):
    """Partition a graph into two blocks using the Kernighan–Lin
    algorithm.

    This algorithm partitions a network into two sets by iteratively
    swapping pairs of nodes to reduce the edge cut between the two sets. The
    pairs are chosen according to a modified form of Kernighan-Lin [1]_, which
    moves node individually, alternating between sides to keep the bisection
    balanced.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be undirected.

    partition : tuple
        Pair of iterables containing an initial partition. If not
        specified, a random balanced partition is used.

    max_iter : int
        Maximum number of times to attempt swaps to find an
        improvement before giving up.

    weight : key
        Edge data key to use as weight. If None, the weights are all
        set to one.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
        Only used if partition is None

    Returns
    -------
    partition : tuple
        A pair of sets of nodes representing the bipartition.

    Raises
    ------
    NetworkXError
        If partition is not a valid partition of the nodes of the graph.

    References
    ----------
    .. [1] Kernighan, B. W.; Lin, Shen (1970).
       "An efficient heuristic procedure for partitioning graphs."
       *Bell Systems Technical Journal* 49: 291--307.
       Oxford University Press 2011.

    """
    n = len(G)
    labels = list(G)
    seed.shuffle(labels)
    index = {v: i for i, v in enumerate(labels)}

    if partition is None:
        side = [0] * (n // 2) + [1] * ((n + 1) // 2)
    else:
        try:
            A, B = partition
        except (TypeError, ValueError) as err:
            raise nx.NetworkXError("partition must be two sets") from err
        if not is_partition(G, (A, B)):
            raise nx.NetworkXError("partition invalid")
        side = [0] * n
        for a in A:
            side[index[a]] = 1

    if G.is_multigraph():
        edges = [
            [
                (index[u], sum(e.get(weight, 1) for e in d.values()))
                for u, d in G[v].items()
            ]
            for v in labels
        ]
    else:
        edges = [
            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
        ]

    for i in range(max_iter):
        costs = list(_kernighan_lin_sweep(edges, side))
        min_cost, min_i, _ = min(costs)
        if min_cost >= 0:
            break

        for _, _, (u, v) in costs[:min_i]:
            side[u] = 1
            side[v] = 0

    A = {u for u, s in zip(labels, side) if s == 0}
    B = {u for u, s in zip(labels, side) if s == 1}
    return A, B
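An illustrative usage sketch of `kernighan_lin_bisection` (not part of the packaged file); `nx.cut_size` is the standard NetworkX helper for counting the edges that cross the bisection:

    import networkx as nx

    # Bisect the karate club graph; the seed makes the random initial split reproducible.
    G = nx.karate_club_graph()
    A, B = nx.community.kernighan_lin_bisection(G, seed=1)
    print(len(A), len(B))        # two roughly equal halves (17 and 17 for 34 nodes)
    print(nx.cut_size(G, A, B))  # number of edges crossing the bisection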
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py
ADDED
@@ -0,0 +1,337 @@
"""
Label propagation community detection algorithms.
"""
from collections import Counter, defaultdict, deque

import networkx as nx
from networkx.utils import groups, not_implemented_for, py_random_state

__all__ = [
    "label_propagation_communities",
    "asyn_lpa_communities",
    "fast_label_propagation_communities",
]


@py_random_state("seed")
@nx._dispatchable(edge_attrs="weight")
def fast_label_propagation_communities(G, *, weight=None, seed=None):
    """Returns communities in `G` as detected by fast label propagation.

    The fast label propagation algorithm is described in [1]_. The algorithm is
    probabilistic and the found communities may vary in different executions.

    The algorithm operates as follows. First, the community label of each node is
    set to a unique label. The algorithm then repeatedly updates the labels of
    the nodes to the most frequent label in their neighborhood. In case of ties,
    a random label is chosen from the most frequent labels.

    The algorithm maintains a queue of nodes that still need to be processed.
    Initially, all nodes are added to the queue in a random order. Then the nodes
    are removed from the queue one by one and processed. If a node updates its label,
    all its neighbors that have a different label are added to the queue (if not
    already in the queue). The algorithm stops when the queue is empty.

    Parameters
    ----------
    G : Graph, DiGraph, MultiGraph, or MultiDiGraph
        Any NetworkX graph.

    weight : string, or None (default)
        The edge attribute representing a non-negative weight of an edge. If None,
        each edge is assumed to have weight one. The weight of an edge is used in
        determining the frequency with which a label appears among the neighbors of
        a node (edge with weight `w` is equivalent to `w` unweighted edges).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state. See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge directions are ignored for directed graphs.
    Edge weights must be non-negative numbers.

    References
    ----------
    .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by
       fast label propagation." Scientific Reports 13 (2023): 2701.
       https://doi.org/10.1038/s41598-023-29610-z
    """

    # Queue of nodes to be processed.
    nodes_queue = deque(G)
    seed.shuffle(nodes_queue)

    # Set of nodes in the queue.
    nodes_set = set(G)

    # Assign unique label to each node.
    comms = {node: i for i, node in enumerate(G)}

    while nodes_queue:
        # Remove next node from the queue to process.
        node = nodes_queue.popleft()
        nodes_set.remove(node)

        # Isolated nodes retain their initial label.
        if G.degree(node) > 0:
            # Compute frequency of labels in node's neighborhood.
            label_freqs = _fast_label_count(G, comms, node, weight)
            max_freq = max(label_freqs.values())

            # Always sample new label from most frequent labels.
            comm = seed.choice(
                [comm for comm in label_freqs if label_freqs[comm] == max_freq]
            )

            if comms[node] != comm:
                comms[node] = comm

                # Add neighbors that have different label to the queue.
                for nbr in nx.all_neighbors(G, node):
                    if comms[nbr] != comm and nbr not in nodes_set:
                        nodes_queue.append(nbr)
                        nodes_set.add(nbr)

    yield from groups(comms).values()


def _fast_label_count(G, comms, node, weight=None):
    """Computes the frequency of labels in the neighborhood of a node.

    Returns a dictionary keyed by label to the frequency of that label.
    """

    if weight is None:
        # Unweighted (un)directed simple graph.
        if not G.is_multigraph():
            label_freqs = Counter(map(comms.get, nx.all_neighbors(G, node)))

        # Unweighted (un)directed multigraph.
        else:
            label_freqs = defaultdict(int)
            for nbr in G[node]:
                label_freqs[comms[nbr]] += len(G[node][nbr])

            if G.is_directed():
                for nbr in G.pred[node]:
                    label_freqs[comms[nbr]] += len(G.pred[node][nbr])

    else:
        # Weighted undirected simple/multigraph.
        label_freqs = defaultdict(float)
        for _, nbr, w in G.edges(node, data=weight, default=1):
            label_freqs[comms[nbr]] += w

        # Weighted directed simple/multigraph.
        if G.is_directed():
            for nbr, _, w in G.in_edges(node, data=weight, default=1):
                label_freqs[comms[nbr]] += w

    return label_freqs


@py_random_state(2)
@nx._dispatchable(edge_attrs="weight")
def asyn_lpa_communities(G, weight=None, seed=None):
    """Returns communities in `G` as detected by asynchronous label
    propagation.

    The asynchronous label propagation algorithm is described in
    [1]_. The algorithm is probabilistic and the found communities may
    vary on different executions.

    The algorithm proceeds as follows. After initializing each node with
    a unique label, the algorithm repeatedly sets the label of a node to
    be the label that appears most frequently among that nodes
    neighbors. The algorithm halts when each node has the label that
    appears most frequently among its neighbors. The algorithm is
    asynchronous because each node is updated without waiting for
    updates on the remaining nodes.

    This generalized version of the algorithm in [1]_ accepts edge
    weights.

    Parameters
    ----------
    G : Graph

    weight : string
        The edge attribute representing the weight of an edge.
        If None, each edge is assumed to have weight one. In this
        algorithm, the weight of an edge is used in determining the
        frequency with which a label appears among the neighbors of a
        node: a higher weight means the label appears more often.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge weight attributes must be numerical.

    References
    ----------
    .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near
       linear time algorithm to detect community structures in large-scale
       networks." Physical Review E 76.3 (2007): 036106.
    """

    labels = {n: i for i, n in enumerate(G)}
    cont = True

    while cont:
        cont = False
        nodes = list(G)
        seed.shuffle(nodes)

        for node in nodes:
            if not G[node]:
                continue

            # Get label frequencies among adjacent nodes.
            # Depending on the order they are processed in,
            # some nodes will be in iteration t and others in t-1,
            # making the algorithm asynchronous.
            if weight is None:
                # initialising a Counter from an iterator of labels is
                # faster for getting unweighted label frequencies
                label_freq = Counter(map(labels.get, G[node]))
            else:
                # updating a defaultdict is substantially faster
                # for getting weighted label frequencies
                label_freq = defaultdict(float)
                for _, v, wt in G.edges(node, data=weight, default=1):
                    label_freq[labels[v]] += wt

            # Get the labels that appear with maximum frequency.
            max_freq = max(label_freq.values())
            best_labels = [
                label for label, freq in label_freq.items() if freq == max_freq
            ]

            # If the node does not have one of the maximum frequency labels,
            # randomly choose one of them and update the node's label.
            # Continue the iteration as long as at least one node
            # doesn't have a maximum frequency label.
            if labels[node] not in best_labels:
                labels[node] = seed.choice(best_labels)
                cont = True

    yield from groups(labels).values()


@not_implemented_for("directed")
@nx._dispatchable
def label_propagation_communities(G):
    """Generates community sets determined by label propagation

    Finds communities in `G` using a semi-synchronous label propagation
    method [1]_. This method combines the advantages of both the synchronous
    and asynchronous models. Not implemented for directed graphs.

    Parameters
    ----------
    G : graph
        An undirected NetworkX graph.

    Returns
    -------
    communities : iterable
        A dict_values object that contains a set of nodes for each community.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed

    References
    ----------
    .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection
       via semi-synchronous label propagation algorithms. In Business
       Applications of Social Network Analysis (BASNA), 2010 IEEE International
       Workshop on (pp. 1-8). IEEE.
    """
    coloring = _color_network(G)
    # Create a unique label for each node in the graph
    labeling = {v: k for k, v in enumerate(G)}
    while not _labeling_complete(labeling, G):
        # Update the labels of every node with the same color.
        for color, nodes in coloring.items():
            for n in nodes:
                _update_label(n, labeling, G)

    clusters = defaultdict(set)
    for node, label in labeling.items():
        clusters[label].add(node)
    return clusters.values()


def _color_network(G):
    """Colors the network so that neighboring nodes all have distinct colors.

    Returns a dict keyed by color to a set of nodes with that color.
    """
    coloring = {}  # color => set(node)
    colors = nx.coloring.greedy_color(G)
    for node, color in colors.items():
        if color in coloring:
            coloring[color].add(node)
        else:
            coloring[color] = {node}
    return coloring


def _labeling_complete(labeling, G):
    """Determines whether or not LPA is done.

    Label propagation is complete when all nodes have a label that is
    in the set of highest frequency labels amongst its neighbors.

    Nodes with no neighbors are considered complete.
    """
    return all(
        labeling[v] in _most_frequent_labels(v, labeling, G) for v in G if len(G[v]) > 0
    )


def _most_frequent_labels(node, labeling, G):
    """Returns a set of all labels with maximum frequency in `labeling`.

    Input `labeling` should be a dict keyed by node to labels.
    """
    if not G[node]:
        # Nodes with no neighbors are themselves a community and are labeled
        # accordingly, hence the immediate if statement.
        return {labeling[node]}

    # Compute the frequencies of all neighbors of node
    freqs = Counter(labeling[q] for q in G[node])
    max_freq = max(freqs.values())
    return {label for label, freq in freqs.items() if freq == max_freq}


def _update_label(node, labeling, G):
    """Updates the label of a node using the Prec-Max tie breaking algorithm

    The algorithm is explained in: 'Community Detection via Semi-Synchronous
    Label Propagation Algorithms' Cordasco and Gargano, 2011
    """
    high_labels = _most_frequent_labels(node, labeling, G)
    if len(high_labels) == 1:
        labeling[node] = high_labels.pop()
    elif len(high_labels) > 1:
        # Prec-Max
        if labeling[node] not in high_labels:
            labeling[node] = max(high_labels)
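An illustrative usage sketch (not part of the packaged file) of two of the exported variants above; the fast variant is probabilistic, so its communities can differ between runs unless a seed is fixed:

    import networkx as nx

    G = nx.karate_club_graph()

    # Fast label propagation (queue-based, seeded for reproducibility).
    fast = list(nx.community.fast_label_propagation_communities(G, seed=7))

    # Semi-synchronous label propagation (undirected graphs only).
    semi = list(nx.community.label_propagation_communities(G))

    print(sum(len(c) for c in fast), sum(len(c) for c in semi))  # both cover all 34 nodes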
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/lukes.py
ADDED
@@ -0,0 +1,227 @@
"""Lukes Algorithm for exact optimal weighted tree partitioning."""

from copy import deepcopy
from functools import lru_cache
from random import choice

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ["lukes_partitioning"]

D_EDGE_W = "weight"
D_EDGE_VALUE = 1.0
D_NODE_W = "weight"
D_NODE_VALUE = 1
PKEY = "partitions"
CLUSTER_EVAL_CACHE_SIZE = 2048


def _split_n_from(n, min_size_of_first_part):
    # splits j in two parts of which the first is at least
    # the second argument
    assert n >= min_size_of_first_part
    for p1 in range(min_size_of_first_part, n + 1):
        yield p1, n - p1


@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight")
def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
    """Optimal partitioning of a weighted tree using the Lukes algorithm.

    This algorithm partitions a connected, acyclic graph featuring integer
    node weights and float edge weights. The resulting clusters are such
    that the total weight of the nodes in each cluster does not exceed
    max_size and that the weight of the edges that are cut by the partition
    is minimum. The algorithm is based on [1]_.

    Parameters
    ----------
    G : NetworkX graph

    max_size : int
        Maximum weight a partition can have in terms of sum of
        node_weight for all nodes in the partition

    edge_weight : key
        Edge data key to use as weight. If None, the weights are all
        set to one.

    node_weight : key
        Node data key to use as weight. If None, the weights are all
        set to one. The data must be int.

    Returns
    -------
    partition : list
        A list of sets of nodes representing the clusters of the
        partition.

    Raises
    ------
    NotATree
        If G is not a tree.
    TypeError
        If any of the values of node_weight is not int.

    References
    ----------
    .. [1] Lukes, J. A. (1974).
       "Efficient Algorithm for the Partitioning of Trees."
       IBM Journal of Research and Development, 18(3), 217–224.

    """
    # First sanity check and tree preparation
    if not nx.is_tree(G):
        raise nx.NotATree("lukes_partitioning works only on trees")
    else:
        if nx.is_directed(G):
            root = [n for n, d in G.in_degree() if d == 0]
            assert len(root) == 1
            root = root[0]
            t_G = deepcopy(G)
        else:
            root = choice(list(G.nodes))
            # this has the desirable side effect of not inheriting attributes
            t_G = nx.dfs_tree(G, root)

    # Since we do not want to screw up the original graph,
    # if we have a blank attribute, we make a deepcopy
    if edge_weight is None or node_weight is None:
        safe_G = deepcopy(G)
        if edge_weight is None:
            nx.set_edge_attributes(safe_G, D_EDGE_VALUE, D_EDGE_W)
            edge_weight = D_EDGE_W
        if node_weight is None:
            nx.set_node_attributes(safe_G, D_NODE_VALUE, D_NODE_W)
            node_weight = D_NODE_W
    else:
        safe_G = G

    # Second sanity check
    # The values of node_weight MUST BE int.
    # I cannot see any room for duck typing without incurring serious
    # danger of subtle bugs.
    all_n_attr = nx.get_node_attributes(safe_G, node_weight).values()
    for x in all_n_attr:
        if not isinstance(x, int):
            raise TypeError(
                "lukes_partitioning needs integer "
                f"values for node_weight ({node_weight})"
            )

    # SUBROUTINES -----------------------
    # these functions are defined here for two reasons:
    # - brevity: we can leverage global "safe_G"
    # - caching: signatures are hashable

    @not_implemented_for("undirected")
    # this is intended to be called only on t_G
    def _leaves(gr):
        for x in gr.nodes:
            if not nx.descendants(gr, x):
                yield x

    @not_implemented_for("undirected")
    def _a_parent_of_leaves_only(gr):
        tleaves = set(_leaves(gr))
        for n in set(gr.nodes) - tleaves:
            if all(x in tleaves for x in nx.descendants(gr, n)):
                return n

    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
    def _value_of_cluster(cluster):
        valid_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster]
        return sum(safe_G.edges[e][edge_weight] for e in valid_edges)

    def _value_of_partition(partition):
        return sum(_value_of_cluster(frozenset(c)) for c in partition)

    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
    def _weight_of_cluster(cluster):
        return sum(safe_G.nodes[n][node_weight] for n in cluster)

    def _pivot(partition, node):
        ccx = [c for c in partition if node in c]
        assert len(ccx) == 1
        return ccx[0]

    def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
        ccx = _pivot(partition_1, x)
        cci = _pivot(partition_2, i)
        merged_xi = ccx.union(cci)

        # We first check if we can do the merge.
        # If so, we do the actual calculations, otherwise we concatenate
        if _weight_of_cluster(frozenset(merged_xi)) <= ref_weight:
            cp1 = list(filter(lambda x: x != ccx, partition_1))
            cp2 = list(filter(lambda x: x != cci, partition_2))

            option_2 = [merged_xi] + cp1 + cp2
            return option_2, _value_of_partition(option_2)
        else:
            option_1 = partition_1 + partition_2
            return option_1, _value_of_partition(option_1)

    # INITIALIZATION -----------------------
    leaves = set(_leaves(t_G))
    for lv in leaves:
        t_G.nodes[lv][PKEY] = {}
        slot = safe_G.nodes[lv][node_weight]
        t_G.nodes[lv][PKEY][slot] = [{lv}]
        t_G.nodes[lv][PKEY][0] = [{lv}]

    for inner in [x for x in t_G.nodes if x not in leaves]:
        t_G.nodes[inner][PKEY] = {}
        slot = safe_G.nodes[inner][node_weight]
        t_G.nodes[inner][PKEY][slot] = [{inner}]
    nx._clear_cache(t_G)

    # CORE ALGORITHM -----------------------
    while True:
        x_node = _a_parent_of_leaves_only(t_G)
        weight_of_x = safe_G.nodes[x_node][node_weight]
        best_value = 0
        best_partition = None
        bp_buffer = {}
        x_descendants = nx.descendants(t_G, x_node)
        for i_node in x_descendants:
            for j in range(weight_of_x, max_size + 1):
                for a, b in _split_n_from(j, weight_of_x):
                    if (
                        a not in t_G.nodes[x_node][PKEY]
                        or b not in t_G.nodes[i_node][PKEY]
                    ):
                        # it's not possible to form this particular weight sum
                        continue

                    part1 = t_G.nodes[x_node][PKEY][a]
                    part2 = t_G.nodes[i_node][PKEY][b]
                    part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j)

                    if j not in bp_buffer or bp_buffer[j][1] < value:
                        # we annotate in the buffer the best partition for j
                        bp_buffer[j] = part, value

                    # we also keep track of the overall best partition
                    if best_value <= value:
                        best_value = value
                        best_partition = part

            # as illustrated in Lukes, once we finished a child, we can
            # discharge the partitions we found into the graph
            # (the key phrase is make all x == x')
            # so that they are used by the subsequent children
            for w, (best_part_for_vl, vl) in bp_buffer.items():
                t_G.nodes[x_node][PKEY][w] = best_part_for_vl
            bp_buffer.clear()

        # the absolute best partition for this node
        # across all weights has to be stored at 0
        t_G.nodes[x_node][PKEY][0] = best_partition
        t_G.remove_nodes_from(x_descendants)

        if x_node == root:
            # the 0-labeled partition of root
            # is the optimal one for the whole tree
            return t_G.nodes[root][PKEY][0]
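As a quick illustration of the kind of input lukes_partitioning expects (a tree with integer node weights), here is a minimal usage sketch; the path graph and the max_size of 2 are assumptions made for the example, not taken from the diff:

import networkx as nx

# A path 0-1-2-3 is a tree; with the default node weight of 1 and
# max_size=2, each cluster may contain at most two nodes, so the
# optimal partition cuts only the middle edge.
G = nx.path_graph(4)
clusters = nx.community.lukes_partitioning(G, max_size=2)
print(clusters)  # e.g. [{0, 1}, {2, 3}]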
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/modularity_max.py
ADDED
@@ -0,0 +1,451 @@
"""Functions for detecting communities based on modularity."""

from collections import defaultdict

import networkx as nx
from networkx.algorithms.community.quality import modularity
from networkx.utils import not_implemented_for
from networkx.utils.mapped_queue import MappedQueue

__all__ = [
    "greedy_modularity_communities",
    "naive_greedy_modularity_communities",
]


def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
    r"""Yield community partitions of G and the modularity change at each step.

    This function performs Clauset-Newman-Moore greedy modularity maximization [2]_
    At each step of the process it yields the change in modularity that will occur in
    the next step followed by yielding the new community partition after that step.

    Greedy modularity maximization begins with each node in its own community
    and repeatedly joins the pair of communities that lead to the largest
    modularity until one community contains all nodes (the partition has one set).

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Yields
    ------
    Alternating yield statements produce the following two objects:

    communities: dict_values
        A dict_values of frozensets of nodes, one for each community.
        This represents a partition of the nodes of the graph into communities.
        The first yield is the partition with each node in its own community.

    dq: float
        The change in modularity when merging the next two communities
        that leads to the largest modularity.

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    directed = G.is_directed()
    N = G.number_of_nodes()

    # Count edges (or the sum of edge-weights for weighted graphs)
    m = G.size(weight)
    q0 = 1 / m

    # Calculate degrees (notation from the papers)
    # a : the fraction of (weighted) out-degree for each node
    # b : the fraction of (weighted) in-degree for each node
    if directed:
        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
    else:
        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}

    # this preliminary step collects the edge weights for each node pair
    # It handles multigraph and digraph and works fine for graph.
    dq_dict = defaultdict(lambda: defaultdict(float))
    for u, v, wt in G.edges(data=weight, default=1):
        if u == v:
            continue
        dq_dict[u][v] += wt
        dq_dict[v][u] += wt

    # now scale and subtract the expected edge-weights term
    for u, nbrdict in dq_dict.items():
        for v, wt in nbrdict.items():
            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])

    # Use -dq to get a max_heap instead of a min_heap
    # dq_heap holds a heap for each node's neighbors
    dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G}
    # H -> all_dq_heap holds a heap with the best items for each node
    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])

    # Initialize single-node communities
    communities = {n: frozenset([n]) for n in G}
    yield communities.values()

    # Merge the two communities that lead to the largest modularity
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            negdq, u, v = H.pop()
        except IndexError:
            break
        dq = -negdq
        yield dq
        # Remove best merge from row u heap
        dq_heap[u].pop()
        # Push new row max onto H
        if len(dq_heap[u]) > 0:
            H.push(dq_heap[u].heap[0])
        # If this element was also at the root of row v, we need to remove the
        # duplicate entry from H
        if dq_heap[v].heap[0] == (v, u):
            H.remove((v, u))
            # Remove best merge from row v heap
            dq_heap[v].remove((v, u))
            # Push new row max onto H
            if len(dq_heap[v]) > 0:
                H.push(dq_heap[v].heap[0])
        else:
            # Duplicate wasn't in H, just remove from row v heap
            dq_heap[v].remove((v, u))

        # Perform merge
        communities[v] = frozenset(communities[u] | communities[v])
        del communities[u]

        # Get neighbor communities connected to the merged communities
        u_nbrs = set(dq_dict[u])
        v_nbrs = set(dq_dict[v])
        all_nbrs = (u_nbrs | v_nbrs) - {u, v}
        both_nbrs = u_nbrs & v_nbrs
        # Update dq for merge of u into v
        for w in all_nbrs:
            # Calculate new dq value
            if w in both_nbrs:
                dq_vw = dq_dict[v][w] + dq_dict[u][w]
            elif w in v_nbrs:
                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u])
            else:  # w in u_nbrs
                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v])
            # Update rows v and w
            for row, col in [(v, w), (w, v)]:
                dq_heap_row = dq_heap[row]
                # Update dict for v,w only (u is removed below)
                dq_dict[row][col] = dq_vw
                # Save old max of per-row heap
                if len(dq_heap_row) > 0:
                    d_oldmax = dq_heap_row.heap[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (row, col)
                d_negdq = -dq_vw
                # Save old value for finding heap index
                if w in v_nbrs:
                    # Update existing element in per-row heap
                    dq_heap_row.update(d, d, priority=d_negdq)
                else:
                    # We're creating a new nonzero element, add to heap
                    dq_heap_row.push(d, priority=d_negdq)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d, priority=d_negdq)
                else:
                    # We've updated an entry in this row, has the max changed?
                    row_max = dq_heap_row.heap[0]
                    if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
                        H.update(d_oldmax, row_max)

        # Remove row/col u from dq_dict matrix
        for w in dq_dict[u]:
            # Remove from dict
            dq_old = dq_dict[w][u]
            del dq_dict[w][u]
            # Remove from heaps if we haven't already
            if w != v:
                # Remove both row and column
                for row, col in [(w, u), (u, w)]:
                    dq_heap_row = dq_heap[row]
                    # Check if replaced dq is row max
                    d_old = (row, col)
                    if dq_heap_row.heap[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap_row.remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap_row) > 0:
                            H.push(dq_heap_row.heap[0])
                    else:
                        # Only update per-row heap
                        dq_heap_row.remove(d_old)

        del dq_dict[u]
        # Mark row u as deleted, but keep placeholder
        dq_heap[u] = MappedQueue()
        # Merge u into v and update a
        a[v] += a[u]
        a[u] = 0
        if directed:
            b[v] += b[u]
            b[u] = 0

        yield communities.values()


@nx._dispatchable(edge_attrs="weight")
def greedy_modularity_communities(
    G,
    weight=None,
    resolution=1,
    cutoff=1,
    best_n=None,
):
    r"""Find communities in G using greedy modularity maximization.

    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_
    to find the community partition with the largest modularity.

    Greedy modularity maximization begins with each node in its own community
    and repeatedly joins the pair of communities that lead to the largest
    modularity until no further increase in modularity is possible (a maximum).
    Two keyword arguments adjust the stopping condition. `cutoff` is a lower
    limit on the number of communities so you can stop the process before
    reaching a maximum (used to save computation time). `best_n` is an upper
    limit on the number of communities so you can make the process continue
    until at most n communities remain even if the maximum modularity occurs
    for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float, optional (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    cutoff : int, optional (default=1)
        A minimum number of communities below which the merging process stops.
        The process stops at this number of communities even if modularity
        is not maximized. The goal is to let the user stop the process early.
        The process stops before the cutoff if it finds a maximum of modularity.

    best_n : int or None, optional (default=None)
        A maximum number of communities above which the merging process will
        not stop. This forces community merging to continue after modularity
        starts to decrease until `best_n` communities remain.
        If ``None``, don't force it to continue beyond a maximum.

    Raises
    ------
    ValueError : If the `cutoff` or `best_n` value is not in the range
        ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.

    Returns
    -------
    communities: list
        A list of frozensets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    if not G.size():
        return [{n} for n in G]

    if (cutoff < 1) or (cutoff > G.number_of_nodes()):
        raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.")
    if best_n is not None:
        if (best_n < 1) or (best_n > G.number_of_nodes()):
            raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.")
        if best_n < cutoff:
            raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}")
        if best_n == 1:
            return [set(G)]
    else:
        best_n = G.number_of_nodes()

    # retrieve generator object to construct output
    community_gen = _greedy_modularity_communities_generator(
        G, weight=weight, resolution=resolution
    )

    # construct the first best community
    communities = next(community_gen)

    # continue merging communities until one of the breaking criteria is satisfied
    while len(communities) > cutoff:
        try:
            dq = next(community_gen)
        # StopIteration occurs when communities are the connected components
        except StopIteration:
            communities = sorted(communities, key=len, reverse=True)
            # if best_n requires more merging, merge big sets for highest modularity
            while len(communities) > best_n:
                comm1, comm2, *rest = communities
                communities = [comm1 ^ comm2]
                communities.extend(rest)
            return communities

        # keep going unless max_mod is reached or best_n says to merge more
        if dq < 0 and len(communities) <= best_n:
            break
        communities = next(community_gen)

    return sorted(communities, key=len, reverse=True)


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def naive_greedy_modularity_communities(G, resolution=1, weight=None):
    r"""Find communities in G using greedy modularity maximization.

    This implementation is O(n^4), much slower than alternatives, but it is
    provided as an easy-to-understand reference implementation.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be simple and undirected.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    list
        A list of sets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.naive_greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    greedy_modularity_communities
    modularity
    """
    # First create one community for each node
    communities = [frozenset([u]) for u in G.nodes()]
    # Track merges
    merges = []
    # Greedily merge communities until no improvement is possible
    old_modularity = None
    new_modularity = modularity(G, communities, resolution=resolution, weight=weight)
    while old_modularity is None or new_modularity > old_modularity:
        # Save modularity for comparison
        old_modularity = new_modularity
        # Find best pair to merge
        trial_communities = list(communities)
        to_merge = None
        for i, u in enumerate(communities):
            for j, v in enumerate(communities):
                # Skip i==j and empty communities
                if j <= i or len(u) == 0 or len(v) == 0:
                    continue
                # Merge communities u and v
                trial_communities[j] = u | v
                trial_communities[i] = frozenset([])
                trial_modularity = modularity(
                    G, trial_communities, resolution=resolution, weight=weight
                )
                if trial_modularity >= new_modularity:
                    # Check if strictly better or tie
                    if trial_modularity > new_modularity:
                        # Found new best, save modularity and group indexes
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                    elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]):
                        # Break ties by choosing pair with lowest min id
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                # Un-merge
                trial_communities[i] = u
                trial_communities[j] = v
        if to_merge is not None:
            # If the best merge improves modularity, use it
            merges.append(to_merge)
            i, j, dq = to_merge
            u, v = communities[i], communities[j]
            communities[j] = u | v
            communities[i] = frozenset([])
    # Remove empty communities and sort
    return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
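To show how cutoff and best_n interact in greedy_modularity_communities, a short sketch; the karate-club graph follows the docstring example, while forcing exactly three communities is an illustrative assumption:

import networkx as nx

G = nx.karate_club_graph()
# Setting cutoff and best_n to the same value forces exactly that many
# communities, merging past the modularity maximum if necessary.
communities = nx.community.greedy_modularity_communities(G, cutoff=3, best_n=3)
print(len(communities))  # 3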
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/quality.py
ADDED
@@ -0,0 +1,346 @@
"""Functions for measuring the quality of a partition (into
communities).

"""

from itertools import combinations

import networkx as nx
from networkx import NetworkXError
from networkx.algorithms.community.community_utils import is_partition
from networkx.utils.decorators import argmap

__all__ = ["modularity", "partition_quality"]


class NotAPartition(NetworkXError):
    """Raised if a given collection is not a partition."""

    def __init__(self, G, collection):
        msg = f"{collection} is not a valid partition of the graph {G}"
        super().__init__(msg)


def _require_partition(G, partition):
    """Decorator to check that a valid partition is input to a function

    Raises :exc:`networkx.NetworkXError` if the partition is not valid.

    This decorator should be used on functions whose first two arguments
    are a graph and a partition of the nodes of that graph (in that
    order)::

        >>> @require_partition
        ... def foo(G, partition):
        ...     print("partition is valid!")
        ...
        >>> G = nx.complete_graph(5)
        >>> partition = [{0, 1}, {2, 3}, {4}]
        >>> foo(G, partition)
        partition is valid!
        >>> partition = [{0}, {2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
        >>> partition = [{0, 1}, {1, 2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G

    """
    if is_partition(G, partition):
        return G, partition
    raise nx.NetworkXError("`partition` is not a valid partition of the nodes of G")


require_partition = argmap(_require_partition, (0, 1))


@nx._dispatchable
def intra_community_edges(G, partition):
    """Returns the number of intra-community edges for a partition of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The "intra-community edges" are those edges joining a pair of nodes
    in the same block of the partition.

    """
    return sum(G.subgraph(block).size() for block in partition)


@nx._dispatchable
def inter_community_edges(G, partition):
    """Returns the number of inter-community edges for a partition of `G`.
    according to the given
    partition of the nodes of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The *inter-community edges* are those edges joining a pair of nodes
    in different blocks of the partition.

    Implementation note: this function creates an intermediate graph
    that may require the same amount of memory as that of `G`.

    """
    # Alternate implementation that does not require constructing a new
    # graph object (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in G.edges() if aff[u] != aff[v])
    #
    MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
    return nx.quotient_graph(G, partition, create_using=MG).size()


@nx._dispatchable
def inter_community_non_edges(G, partition):
    """Returns the number of inter-community non-edges according to the
    given partition of the nodes of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
    that are not adjacent in `G`. The *inter-community non-edges* are
    those non-edges on a pair of nodes in different blocks of the
    partition.

    Implementation note: this function creates two intermediate graphs,
    which may require up to twice the amount of memory as required to
    store `G`.

    """
    # Alternate implementation that does not require constructing two
    # new graph objects (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
    #
    return inter_community_edges(nx.complement(G), partition)


@nx._dispatchable(edge_attrs="weight")
def modularity(G, communities, weight="weight", resolution=1):
    r"""Returns the modularity of the given partition of the graph.

    Modularity is defined in [1]_ as

    .. math::
        Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \gamma\frac{k_ik_j}{2m}\right)
            \delta(c_i,c_j)

    where $m$ is the number of edges (or sum of all edge weights as in [5]_),
    $A$ is the adjacency matrix of `G`, $k_i$ is the (weighted) degree of $i$,
    $\gamma$ is the resolution parameter, and $\delta(c_i, c_j)$ is 1 if $i$ and
    $j$ are in the same community else 0.

    According to [2]_ (and verified by some algebra) this can be reduced to

    .. math::
       Q = \sum_{c=1}^{n}
           \left[ \frac{L_c}{m} - \gamma\left( \frac{k_c}{2m} \right) ^2 \right]

    where the sum iterates over all communities $c$, $m$ is the number of edges,
    $L_c$ is the number of intra-community links for community $c$,
    $k_c$ is the sum of degrees of the nodes in community $c$,
    and $\gamma$ is the resolution parameter.

    The resolution parameter sets an arbitrary tradeoff between intra-group
    edges and inter-group edges. More complex grouping patterns can be
    discovered by analyzing the same network with multiple values of gamma
    and then combining the results [3]_. That said, it is very common to
    simply use gamma=1. More on the choice of gamma is in [4]_.

    The second formula is the one actually used in calculation of the modularity.
    For directed graphs the second formula replaces $k_c$ with $k^{in}_c k^{out}_c$.

    Parameters
    ----------
    G : NetworkX Graph

    communities : list or iterable of set of nodes
        These node sets must represent a partition of G's nodes.

    weight : string or None, optional (default="weight")
        The edge attribute that holds the numerical value used
        as a weight. If None or an edge does not have that attribute,
        then that edge has weight 1.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Returns
    -------
    Q : float
        The modularity of the partition.

    Raises
    ------
    NotAPartition
        If `communities` is not a partition of the nodes of `G`.

    Examples
    --------
    >>> G = nx.barbell_graph(3, 0)
    >>> nx.community.modularity(G, [{0, 1, 2}, {3, 4, 5}])
    0.35714285714285715
    >>> nx.community.modularity(G, nx.community.label_propagation_communities(G))
    0.35714285714285715

    References
    ----------
    .. [1] M. E. J. Newman "Networks: An Introduction", page 224.
       Oxford University Press, 2011.
    .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
       "Finding community structure in very large networks."
       Phys. Rev. E 70.6 (2004). <https://arxiv.org/abs/cond-mat/0408187>
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community Detection"
       Phys. Rev. E 74, 016110, 2006. https://doi.org/10.1103/PhysRevE.74.016110
    .. [4] M. E. J. Newman, "Equivalence between modularity optimization and
       maximum likelihood methods for community detection"
       Phys. Rev. E 94, 052315, 2016. https://doi.org/10.1103/PhysRevE.94.052315
    .. [5] Blondel, V.D. et al. "Fast unfolding of communities in large
       networks" J. Stat. Mech 10008, 1-12 (2008).
       https://doi.org/10.1088/1742-5468/2008/10/P10008
    """
    if not isinstance(communities, list):
        communities = list(communities)
    if not is_partition(G, communities):
        raise NotAPartition(G, communities)

    directed = G.is_directed()
    if directed:
        out_degree = dict(G.out_degree(weight=weight))
        in_degree = dict(G.in_degree(weight=weight))
        m = sum(out_degree.values())
        norm = 1 / m**2
    else:
        out_degree = in_degree = dict(G.degree(weight=weight))
        deg_sum = sum(out_degree.values())
        m = deg_sum / 2
        norm = 1 / deg_sum**2

    def community_contribution(community):
        comm = set(community)
        L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm)

        out_degree_sum = sum(out_degree[u] for u in comm)
        in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum

        return L_c / m - resolution * out_degree_sum * in_degree_sum * norm

    return sum(map(community_contribution, communities))


@require_partition
@nx._dispatchable
def partition_quality(G, partition):
    """Returns the coverage and performance of a partition of G.

    The *coverage* of a partition is the ratio of the number of
    intra-community edges to the total number of edges in the graph.

    The *performance* of a partition is the number of
    intra-community edges plus inter-community non-edges divided by the total
    number of potential edges.

    This algorithm has complexity $O(C^2 + L)$ where C is the number of communities and L is the number of links.

    Parameters
    ----------
    G : NetworkX graph

    partition : sequence
        Partition of the nodes of `G`, represented as a sequence of
        sets of nodes (blocks). Each block of the partition represents a
        community.

    Returns
    -------
    (float, float)
        The (coverage, performance) tuple of the partition, as defined above.

    Raises
    ------
    NetworkXError
        If `partition` is not a valid partition of the nodes of `G`.

    Notes
    -----
    If `G` is a multigraph;
        - for coverage, the multiplicity of edges is counted
        - for performance, the result is -1 (total number of possible edges is not defined)

    References
    ----------
    .. [1] Santo Fortunato.
       "Community Detection in Graphs".
       *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
       <https://arxiv.org/abs/0906.0612>
    """

    node_community = {}
    for i, community in enumerate(partition):
        for node in community:
            node_community[node] = i

    # `performance` is not defined for multigraphs
    if not G.is_multigraph():
        # Iterate over the communities, quadratic, to calculate `possible_inter_community_edges`
        possible_inter_community_edges = sum(
            len(p1) * len(p2) for p1, p2 in combinations(partition, 2)
        )

        if G.is_directed():
            possible_inter_community_edges *= 2
    else:
        possible_inter_community_edges = 0

    # Compute the number of edges in the complete graph -- `n` nodes,
    # directed or undirected, depending on `G`
    n = len(G)
    total_pairs = n * (n - 1)
    if not G.is_directed():
        total_pairs //= 2

    intra_community_edges = 0
    inter_community_non_edges = possible_inter_community_edges

    # Iterate over the links to count `intra_community_edges` and `inter_community_non_edges`
    for e in G.edges():
        if node_community[e[0]] == node_community[e[1]]:
            intra_community_edges += 1
        else:
            inter_community_non_edges -= 1

    coverage = intra_community_edges / len(G.edges)

    if G.is_multigraph():
        performance = -1.0
    else:
        performance = (intra_community_edges + inter_community_non_edges) / total_pairs

    return coverage, performance
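A minimal sketch of partition_quality alongside modularity, reusing the barbell partition from the modularity docstring example (the printed values are worked out from the definitions above):

import networkx as nx

G = nx.barbell_graph(3, 0)
partition = [{0, 1, 2}, {3, 4, 5}]
coverage, performance = nx.community.partition_quality(G, partition)
# 6 of the 7 edges are intra-community: coverage = 6/7 ~= 0.857
# (6 intra edges + 8 inter non-edges) / 15 pairs: performance = 14/15 ~= 0.933
print(round(coverage, 3), round(performance, 3))
print(nx.community.modularity(G, partition))  # 0.35714285714285715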
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/__init__.py
ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
ADDED
@@ -0,0 +1,136 @@
import pytest

import networkx as nx
from networkx import Graph, NetworkXError
from networkx.algorithms.community import asyn_fluidc


@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
def test_raises_on_directed_and_multigraphs(graph_constructor):
    G = graph_constructor([(0, 1), (1, 2)])
    with pytest.raises(nx.NetworkXNotImplemented):
        nx.community.asyn_fluidc(G, 1)


def test_exceptions():
    test = Graph()
    test.add_node("a")
    pytest.raises(NetworkXError, asyn_fluidc, test, "hi")
    pytest.raises(NetworkXError, asyn_fluidc, test, -1)
    pytest.raises(NetworkXError, asyn_fluidc, test, 3)
    test.add_node("b")
    pytest.raises(NetworkXError, asyn_fluidc, test, 1)


def test_single_node():
    test = Graph()

    test.add_node("a")

    # ground truth
    ground_truth = {frozenset(["a"])}

    communities = asyn_fluidc(test, 1)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth


def test_two_nodes():
    test = Graph()

    test.add_edge("a", "b")

    # ground truth
    ground_truth = {frozenset(["a"]), frozenset(["b"])}

    communities = asyn_fluidc(test, 2)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth


def test_two_clique_communities():
    test = Graph()

    # c1
    test.add_edge("a", "b")
    test.add_edge("a", "c")
    test.add_edge("b", "c")

    # connection
    test.add_edge("c", "d")

    # c2
    test.add_edge("d", "e")
    test.add_edge("d", "f")
    test.add_edge("f", "e")

    # ground truth
    ground_truth = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])}

    communities = asyn_fluidc(test, 2, seed=7)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth


def test_five_clique_ring():
    test = Graph()

    # c1
    test.add_edge("1a", "1b")
    test.add_edge("1a", "1c")
    test.add_edge("1a", "1d")
    test.add_edge("1b", "1c")
    test.add_edge("1b", "1d")
    test.add_edge("1c", "1d")

    # c2
    test.add_edge("2a", "2b")
    test.add_edge("2a", "2c")
    test.add_edge("2a", "2d")
    test.add_edge("2b", "2c")
    test.add_edge("2b", "2d")
    test.add_edge("2c", "2d")

    # c3
    test.add_edge("3a", "3b")
    test.add_edge("3a", "3c")
    test.add_edge("3a", "3d")
    test.add_edge("3b", "3c")
    test.add_edge("3b", "3d")
    test.add_edge("3c", "3d")

    # c4
    test.add_edge("4a", "4b")
    test.add_edge("4a", "4c")
    test.add_edge("4a", "4d")
    test.add_edge("4b", "4c")
    test.add_edge("4b", "4d")
    test.add_edge("4c", "4d")

    # c5
    test.add_edge("5a", "5b")
    test.add_edge("5a", "5c")
    test.add_edge("5a", "5d")
    test.add_edge("5b", "5c")
    test.add_edge("5b", "5d")
    test.add_edge("5c", "5d")

    # connections
    test.add_edge("1a", "2c")
    test.add_edge("2a", "3c")
    test.add_edge("3a", "4c")
    test.add_edge("4a", "5c")
    test.add_edge("5a", "1c")

    # ground truth
    ground_truth = {
        frozenset(["1a", "1b", "1c", "1d"]),
        frozenset(["2a", "2b", "2c", "2d"]),
        frozenset(["3a", "3b", "3c", "3d"]),
        frozenset(["4a", "4b", "4c", "4d"]),
        frozenset(["5a", "5b", "5c", "5d"]),
    }

    communities = asyn_fluidc(test, 5, seed=9)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_centrality.py
ADDED
@@ -0,0 +1,84 @@
"""Unit tests for the :mod:`networkx.algorithms.community.centrality`
module.

"""
from operator import itemgetter

import networkx as nx


def set_of_sets(iterable):
    return set(map(frozenset, iterable))


def validate_communities(result, expected):
    assert set_of_sets(result) == set_of_sets(expected)


def validate_possible_communities(result, *expected):
    assert any(set_of_sets(result) == set_of_sets(p) for p in expected)


class TestGirvanNewman:
    """Unit tests for the
    :func:`networkx.algorithms.community.centrality.girvan_newman`
    function.

    """

    def test_no_edges(self):
        G = nx.empty_graph(3)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 1
        validate_communities(communities[0], [{0}, {1}, {2}])

    def test_undirected(self):
        # Start with the graph .-.-.-.
        G = nx.path_graph(4)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        # After one removal, we get the graph .-. .-.
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        # After the next, we get the graph .-. . ., but there are two
        # symmetric possible versions.
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        # After the last removal, we always get the empty graph.
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_directed(self):
        G = nx.DiGraph(nx.path_graph(4))
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_selfloops(self):
        G = nx.path_graph(4)
        G.add_edge(0, 0)
        G.add_edge(2, 2)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_most_valuable_edge(self):
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])
        # Let the most valuable edge be the one with the highest weight.

        def heaviest(G):
            return max(G.edges(data="weight"), key=itemgetter(2))[:2]

        communities = list(nx.community.girvan_newman(G, heaviest))
        assert len(communities) == 3
        validate_communities(communities[0], [{0}, {1, 2, 3}])
        validate_communities(communities[1], [{0}, {1}, {2, 3}])
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_divisive.py
ADDED
@@ -0,0 +1,106 @@
import pytest

import networkx as nx


def test_edge_betweenness_partition():
    G = nx.barbell_graph(3, 0)
    C = nx.community.edge_betweenness_partition(G, 2)
    answer = [{0, 1, 2}, {3, 4, 5}]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    G = nx.barbell_graph(3, 1)
    C = nx.community.edge_betweenness_partition(G, 3)
    answer = [{0, 1, 2}, {4, 5, 6}, {3}]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    C = nx.community.edge_betweenness_partition(G, 7)
    answer = [{n} for n in G]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    C = nx.community.edge_betweenness_partition(G, 1)
    assert C == [set(G)]

    C = nx.community.edge_betweenness_partition(G, 1, weight="weight")
    assert C == [set(G)]

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, 0)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, -1)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, 10)


def test_edge_current_flow_betweenness_partition():
    pytest.importorskip("scipy")

    G = nx.barbell_graph(3, 0)
    C = nx.community.edge_current_flow_betweenness_partition(G, 2)
    answer = [{0, 1, 2}, {3, 4, 5}]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    G = nx.barbell_graph(3, 1)
    C = nx.community.edge_current_flow_betweenness_partition(G, 2)
    answers = [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 3)
    answer = [{0, 1, 2}, {4, 5, 6}, {3}]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    C = nx.community.edge_current_flow_betweenness_partition(G, 4)
    answers = [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 5)
    answer = [{1, 2}, {5, 6}, {3}, {0}, {4}]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    C = nx.community.edge_current_flow_betweenness_partition(G, 6)
    answers = [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 7)
    answer = [{n} for n in G]
    assert len(C) == len(answer)
    for s in answer:
        assert s in C

    C = nx.community.edge_current_flow_betweenness_partition(G, 1)
    assert C == [set(G)]

    C = nx.community.edge_current_flow_betweenness_partition(G, 1, weight="weight")
    assert C == [set(G)]

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, 0)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, -1)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, 10)

    N = 10
    G = nx.empty_graph(N)
    for i in range(2, N - 1):
        C = nx.community.edge_current_flow_betweenness_partition(G, i)
        assert C == [{n} for n in G]
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kclique.py
ADDED
@@ -0,0 +1,91 @@
from itertools import combinations

import pytest

import networkx as nx


def test_overlapping_K5():
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))  # Add a five clique
    G.add_edges_from(combinations(range(2, 7), 2))  # Add another five clique
    c = list(nx.community.k_clique_communities(G, 4))
    assert c == [frozenset(range(7))]
    c = set(nx.community.k_clique_communities(G, 5))
    assert c == {frozenset(range(5)), frozenset(range(2, 7))}


def test_isolated_K5():
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))  # Add a five clique
    G.add_edges_from(combinations(range(5, 10), 2))  # Add another five clique
    c = set(nx.community.k_clique_communities(G, 5))
    assert c == {frozenset(range(5)), frozenset(range(5, 10))}


class TestZacharyKarateClub:
    def setup_method(self):
        self.G = nx.karate_club_graph()

    def _check_communities(self, k, expected):
        communities = set(nx.community.k_clique_communities(self.G, k))
        assert communities == expected

    def test_k2(self):
        # clique percolation with k=2 is just connected components
        expected = {frozenset(self.G)}
        self._check_communities(2, expected)

    def test_k3(self):
        comm1 = [
            0,
            1,
            2,
            3,
            7,
            8,
            12,
            13,
            14,
            15,
            17,
            18,
            19,
            20,
            21,
            22,
            23,
            26,
            27,
            28,
            29,
            30,
            31,
            32,
            33,
        ]
        comm2 = [0, 4, 5, 6, 10, 16]
        comm3 = [24, 25, 31]
        expected = {frozenset(comm1), frozenset(comm2), frozenset(comm3)}
        self._check_communities(3, expected)

    def test_k4(self):
        expected = {
            frozenset([0, 1, 2, 3, 7, 13]),
            frozenset([8, 32, 30, 33]),
            frozenset([32, 33, 29, 23]),
        }
        self._check_communities(4, expected)

    def test_k5(self):
        expected = {frozenset([0, 1, 2, 3, 7, 13])}
        self._check_communities(5, expected)

    def test_k6(self):
        expected = set()
        self._check_communities(6, expected)


def test_bad_k():
    with pytest.raises(nx.NetworkXError):
        list(nx.community.k_clique_communities(nx.Graph(), 1))
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py
ADDED
@@ -0,0 +1,91 @@
"""Unit tests for the :mod:`networkx.algorithms.community.kernighan_lin`
module.
"""
from itertools import permutations

import pytest

import networkx as nx
from networkx.algorithms.community import kernighan_lin_bisection


def assert_partition_equal(x, y):
    assert set(map(frozenset, x)) == set(map(frozenset, y))


def test_partition():
    G = nx.barbell_graph(3, 0)
    C = kernighan_lin_bisection(G)
    assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}])


def test_partition_argument():
    G = nx.barbell_graph(3, 0)
    partition = [{0, 1, 2}, {3, 4, 5}]
    C = kernighan_lin_bisection(G, partition)
    assert_partition_equal(C, partition)


def test_partition_argument_non_integer_nodes():
    G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
    partition = ({"A", "B"}, {"C", "D"})
    C = kernighan_lin_bisection(G, partition)
    assert_partition_equal(C, partition)


def test_seed_argument():
    G = nx.barbell_graph(3, 0)
    C = kernighan_lin_bisection(G, seed=1)
    assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}])


def test_non_disjoint_partition():
    with pytest.raises(nx.NetworkXError):
        G = nx.barbell_graph(3, 0)
        partition = ({0, 1, 2}, {2, 3, 4, 5})
        kernighan_lin_bisection(G, partition)


def test_too_many_blocks():
    with pytest.raises(nx.NetworkXError):
        G = nx.barbell_graph(3, 0)
        partition = ({0, 1}, {2}, {3, 4, 5})
        kernighan_lin_bisection(G, partition)


def test_multigraph():
    G = nx.cycle_graph(4)
    M = nx.MultiGraph(G.edges())
    M.add_edges_from(G.edges())
    M.remove_edge(1, 2)
    for labels in permutations(range(4)):
        mapping = dict(zip(M, labels))
        A, B = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0)
        assert_partition_equal(
            [A, B], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}]
        )


def test_max_iter_argument():
    G = nx.Graph(
        [
            ("A", "B", {"weight": 1}),
            ("A", "C", {"weight": 2}),
            ("A", "D", {"weight": 3}),
            ("A", "E", {"weight": 2}),
            ("A", "F", {"weight": 4}),
            ("B", "C", {"weight": 1}),
            ("B", "D", {"weight": 4}),
            ("B", "E", {"weight": 2}),
            ("B", "F", {"weight": 1}),
            ("C", "D", {"weight": 3}),
            ("C", "E", {"weight": 2}),
            ("C", "F", {"weight": 1}),
            ("D", "E", {"weight": 4}),
            ("D", "F", {"weight": 3}),
            ("E", "F", {"weight": 2}),
        ]
    )
    partition = ({"A", "B", "C"}, {"D", "E", "F"})
    C = kernighan_lin_bisection(G, partition, max_iter=1)
    assert_partition_equal(C, ({"A", "F", "C"}, {"D", "E", "B"}))
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_label_propagation.py
ADDED
@@ -0,0 +1,241 @@
from itertools import chain, combinations

import pytest

import networkx as nx


def test_directed_not_supported():
    with pytest.raises(nx.NetworkXNotImplemented):
        # not supported for directed graphs
        test = nx.DiGraph()
        test.add_edge("a", "b")
        test.add_edge("a", "c")
        test.add_edge("b", "d")
        result = nx.community.label_propagation_communities(test)


def test_iterator_vs_iterable():
    G = nx.empty_graph("a")
    assert list(nx.community.label_propagation_communities(G)) == [{"a"}]
    for community in nx.community.label_propagation_communities(G):
        assert community == {"a"}
    pytest.raises(TypeError, next, nx.community.label_propagation_communities(G))


def test_one_node():
    test = nx.Graph()
    test.add_node("a")

    # The expected communities are:
    ground_truth = {frozenset(["a"])}

    communities = nx.community.label_propagation_communities(test)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth


def test_unconnected_communities():
    test = nx.Graph()
    # community 1
    test.add_edge("a", "c")
    test.add_edge("a", "d")
    test.add_edge("d", "c")
    # community 2
    test.add_edge("b", "e")
    test.add_edge("e", "f")
    test.add_edge("f", "b")

    # The expected communities are:
    ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])}

    communities = nx.community.label_propagation_communities(test)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth


def test_connected_communities():
    test = nx.Graph()
    # community 1
    test.add_edge("a", "b")
    test.add_edge("c", "a")
    test.add_edge("c", "b")
    test.add_edge("d", "a")
    test.add_edge("d", "b")
    test.add_edge("d", "c")
    test.add_edge("e", "a")
    test.add_edge("e", "b")
    test.add_edge("e", "c")
    test.add_edge("e", "d")
    # community 2
    test.add_edge("1", "2")
    test.add_edge("3", "1")
    test.add_edge("3", "2")
    test.add_edge("4", "1")
    test.add_edge("4", "2")
    test.add_edge("4", "3")
    test.add_edge("5", "1")
    test.add_edge("5", "2")
    test.add_edge("5", "3")
    test.add_edge("5", "4")
    # edge between community 1 and 2
    test.add_edge("a", "1")
    # community 3
    test.add_edge("x", "y")
    # community 4 with only a single node
    test.add_node("z")

    # The expected communities are:
    ground_truth1 = {
        frozenset(["a", "b", "c", "d", "e"]),
        frozenset(["1", "2", "3", "4", "5"]),
        frozenset(["x", "y"]),
        frozenset(["z"]),
    }
    ground_truth2 = {
        frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]),
        frozenset(["x", "y"]),
        frozenset(["z"]),
    }
    ground_truth = (ground_truth1, ground_truth2)

    communities = nx.community.label_propagation_communities(test)
    result = {frozenset(c) for c in communities}
    assert result in ground_truth


def test_termination():
    # ensure termination of asyn_lpa_communities in two cases
    # that led to an endless loop in a previous version
    test1 = nx.karate_club_graph()
    test2 = nx.caveman_graph(2, 10)
    test2.add_edges_from([(0, 20), (20, 10)])
    nx.community.asyn_lpa_communities(test1)
    nx.community.asyn_lpa_communities(test2)


class TestAsynLpaCommunities:
    def _check_communities(self, G, expected):
        """Checks that the communities computed from the given graph ``G``
        using the :func:`~networkx.asyn_lpa_communities` function match
        the set of nodes given in ``expected``.

        ``expected`` must be a :class:`set` of :class:`frozenset`
        instances, each element of which is a node in the graph.

        """
        communities = nx.community.asyn_lpa_communities(G)
        result = {frozenset(c) for c in communities}
        assert result == expected

    def test_null_graph(self):
        G = nx.null_graph()
        ground_truth = set()
        self._check_communities(G, ground_truth)

    def test_single_node(self):
        G = nx.empty_graph(1)
        ground_truth = {frozenset([0])}
        self._check_communities(G, ground_truth)

    def test_simple_communities(self):
        # This graph is the disjoint union of two triangles.
        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
        ground_truth = {frozenset("abc"), frozenset("def")}
        self._check_communities(G, ground_truth)

    def test_seed_argument(self):
        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
        ground_truth = {frozenset("abc"), frozenset("def")}
        communities = nx.community.asyn_lpa_communities(G, seed=1)
        result = {frozenset(c) for c in communities}
        assert result == ground_truth

    def test_several_communities(self):
        # This graph is the disjoint union of five triangles.
        ground_truth = {frozenset(range(3 * i, 3 * (i + 1))) for i in range(5)}
        edges = chain.from_iterable(combinations(c, 2) for c in ground_truth)
        G = nx.Graph(edges)
        self._check_communities(G, ground_truth)


class TestFastLabelPropagationCommunities:
    N = 100  # number of nodes
    K = 15  # average node degree

    def _check_communities(self, G, truth, weight=None, seed=42):
        C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed)
        assert {frozenset(c) for c in C} == truth

    def test_null_graph(self):
        G = nx.null_graph()
        truth = set()
        self._check_communities(G, truth)

    def test_empty_graph(self):
        G = nx.empty_graph(self.N)
        truth = {frozenset([i]) for i in G}
        self._check_communities(G, truth)

    def test_star_graph(self):
        G = nx.star_graph(self.N)
        truth = {frozenset(G)}
        self._check_communities(G, truth)

    def test_complete_graph(self):
        G = nx.complete_graph(self.N)
        truth = {frozenset(G)}
        self._check_communities(G, truth)

    def test_bipartite_graph(self):
        G = nx.complete_bipartite_graph(self.N // 2, self.N // 2)
        truth = {frozenset(G)}
        self._check_communities(G, truth)

    def test_random_graph(self):
        G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42)
        truth = {frozenset(G)}
        self._check_communities(G, truth)

    def test_disjoin_cliques(self):
        G = nx.Graph(["ab", "AB", "AC", "BC", "12", "13", "14", "23", "24", "34"])
        truth = {frozenset("ab"), frozenset("ABC"), frozenset("1234")}
        self._check_communities(G, truth)

    def test_ring_of_cliques(self):
        N, K = self.N, self.K
        G = nx.ring_of_cliques(N, K)
        truth = {frozenset([K * i + k for k in range(K)]) for i in range(N)}
        self._check_communities(G, truth)

    def test_larger_graph(self):
        G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42)
        nx.community.fast_label_propagation_communities(G)

    def test_graph_type(self):
        G1 = nx.complete_graph(self.N, nx.MultiDiGraph())
        G2 = nx.MultiGraph(G1)
        G3 = nx.DiGraph(G1)
        G4 = nx.Graph(G1)
        truth = {frozenset(G1)}
        self._check_communities(G1, truth)
        self._check_communities(G2, truth)
        self._check_communities(G3, truth)
        self._check_communities(G4, truth)

    def test_weight_argument(self):
        G = nx.MultiDiGraph()
        G.add_edge(1, 2, weight=1.41)
        G.add_edge(2, 1, weight=1.41)
        G.add_edge(2, 3)
        G.add_edge(3, 4, weight=3.14)
        truth = {frozenset({1, 2}), frozenset({3, 4})}
        self._check_communities(G, truth, weight="weight")

    def test_seed_argument(self):
        G = nx.karate_club_graph()
        C = nx.community.fast_label_propagation_communities(G, seed=2023)
        truth = {frozenset(c) for c in C}
        self._check_communities(G, truth, seed=2023)
        # smoke test that seed=None works
        C = nx.community.fast_label_propagation_communities(G, seed=None)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_louvain.py
ADDED
@@ -0,0 +1,264 @@
import pytest

import networkx as nx


def test_modularity_increase():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    partition = [{u} for u in G.nodes()]
    mod = nx.community.modularity(G, partition)
    partition = nx.community.louvain_communities(G)

    assert nx.community.modularity(G, partition) > mod


def test_valid_partition():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    H = G.to_directed()
    partition = nx.community.louvain_communities(G)
    partition2 = nx.community.louvain_communities(H)

    assert nx.community.is_partition(G, partition)
    assert nx.community.is_partition(H, partition2)


def test_karate_club_partition():
    G = nx.karate_club_graph()
    part = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    partition = nx.community.louvain_communities(G, seed=2, weight=None)

    assert part == partition


def test_partition_iterator():
    G = nx.path_graph(15)
    parts_iter = nx.community.louvain_partitions(G, seed=42)
    first_part = next(parts_iter)
    first_copy = [s.copy() for s in first_part]

    # gh-5901 reports sets changing after next partition is yielded
    assert first_copy[0] == first_part[0]
    second_part = next(parts_iter)
    assert first_copy[0] == first_part[0]


def test_undirected_selfloops():
    G = nx.karate_club_graph()
    expected_partition = nx.community.louvain_communities(G, seed=2, weight=None)
    part = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    assert expected_partition == part

    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(9)])
    # large self-loop weight impacts partition
    partition = nx.community.louvain_communities(G, seed=2, weight="weight")
    assert part != partition

    # small self-loop weights aren't enough to impact partition in this graph
    partition = nx.community.louvain_communities(G, seed=2, weight=None)
    assert part == partition


def test_directed_selfloops():
    G = nx.DiGraph()
    G.add_nodes_from(range(11))
    G_edges = [
        (0, 2),
        (0, 1),
        (1, 0),
        (2, 1),
        (2, 0),
        (3, 4),
        (4, 3),
        (7, 8),
        (8, 7),
        (9, 10),
        (10, 9),
    ]
    G.add_edges_from(G_edges)
    G_expected_partition = nx.community.louvain_communities(G, seed=123, weight=None)

    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(3)])
    # large self-loop weight impacts partition
    G_partition = nx.community.louvain_communities(G, seed=123, weight="weight")
    assert G_partition != G_expected_partition

    # small self-loop weights aren't enough to impact partition in this graph
    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)
    assert G_partition == G_expected_partition


def test_directed_partition():
    """
    Test 2 cases that were looping infinitely
    from issues #5175 and #5704
    """
    G = nx.DiGraph()
    H = nx.DiGraph()
    G.add_nodes_from(range(10))
    H.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    G_edges = [
        (0, 2),
        (0, 1),
        (1, 0),
        (2, 1),
        (2, 0),
        (3, 4),
        (4, 3),
        (7, 8),
        (8, 7),
        (9, 10),
        (10, 9),
    ]
    H_edges = [
        (1, 2),
        (1, 6),
        (1, 9),
        (2, 3),
        (2, 4),
        (2, 5),
        (3, 4),
        (4, 3),
        (4, 5),
        (5, 4),
        (6, 7),
        (6, 8),
        (9, 10),
        (9, 11),
        (10, 11),
        (11, 10),
    ]
    G.add_edges_from(G_edges)
    H.add_edges_from(H_edges)

    G_expected_partition = [{0, 1, 2}, {3, 4}, {5}, {6}, {8, 7}, {9, 10}]
    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)

    H_expected_partition = [{2, 3, 4, 5}, {8, 1, 6, 7}, {9, 10, 11}]
    H_partition = nx.community.louvain_communities(H, seed=123, weight=None)

    assert G_partition == G_expected_partition
    assert H_partition == H_expected_partition


def test_none_weight_param():
    G = nx.karate_club_graph()
    nx.set_edge_attributes(
        G, {edge: i * i for i, edge in enumerate(G.edges)}, name="foo"
    )

    part = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    partition1 = nx.community.louvain_communities(G, weight=None, seed=2)
    partition2 = nx.community.louvain_communities(G, weight="foo", seed=2)
    partition3 = nx.community.louvain_communities(G, weight="weight", seed=2)

    assert part == partition1
    assert part != partition2
    assert part != partition3
    assert partition2 != partition3


def test_quality():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    H = nx.gn_graph(200, seed=1234)
    I = nx.MultiGraph(G)
    J = nx.MultiDiGraph(H)

    partition = nx.community.louvain_communities(G)
    partition2 = nx.community.louvain_communities(H)
    partition3 = nx.community.louvain_communities(I)
    partition4 = nx.community.louvain_communities(J)

    quality = nx.community.partition_quality(G, partition)[0]
    quality2 = nx.community.partition_quality(H, partition2)[0]
    quality3 = nx.community.partition_quality(I, partition3)[0]
    quality4 = nx.community.partition_quality(J, partition4)[0]

    assert quality >= 0.65
    assert quality2 >= 0.65
    assert quality3 >= 0.65
    assert quality4 >= 0.65


def test_multigraph():
    G = nx.karate_club_graph()
    H = nx.MultiGraph(G)
    G.add_edge(0, 1, weight=10)
    H.add_edge(0, 1, weight=9)
    G.add_edge(0, 9, foo=20)
    H.add_edge(0, 9, foo=20)

    partition1 = nx.community.louvain_communities(G, seed=1234)
    partition2 = nx.community.louvain_communities(H, seed=1234)
    partition3 = nx.community.louvain_communities(H, weight="foo", seed=1234)

    assert partition1 == partition2 != partition3


def test_resolution():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )

    partition1 = nx.community.louvain_communities(G, resolution=0.5, seed=12)
    partition2 = nx.community.louvain_communities(G, seed=12)
    partition3 = nx.community.louvain_communities(G, resolution=2, seed=12)

    assert len(partition1) <= len(partition2) <= len(partition3)


def test_threshold():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    partition1 = nx.community.louvain_communities(G, threshold=0.3, seed=2)
    partition2 = nx.community.louvain_communities(G, seed=2)
    mod1 = nx.community.modularity(G, partition1)
    mod2 = nx.community.modularity(G, partition2)

    assert mod1 <= mod2


def test_empty_graph():
    G = nx.Graph()
    G.add_nodes_from(range(5))
    expected = [{0}, {1}, {2}, {3}, {4}]
    assert nx.community.louvain_communities(G) == expected


def test_max_level():
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    parts_iter = nx.community.louvain_partitions(G, seed=42)
    for max_level, expected in enumerate(parts_iter, 1):
        partition = nx.community.louvain_communities(G, max_level=max_level, seed=42)
        assert partition == expected
    assert max_level > 1  # Ensure we are actually testing max_level
    # max_level is an upper limit; it's okay if we stop before it's hit.
    partition = nx.community.louvain_communities(G, max_level=max_level + 1, seed=42)
    assert partition == expected
    with pytest.raises(
        ValueError, match="max_level argument must be a positive integer"
    ):
        nx.community.louvain_communities(G, max_level=0)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_lukes.py
ADDED
@@ -0,0 +1,152 @@
from itertools import product

import pytest

import networkx as nx

EWL = "e_weight"
NWL = "n_weight"


# first test from the Lukes original paper
def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False):
    # problem-specific constants
    limit = 3

    # configuration
    if float_edge_wt:
        shift = 0.001
    else:
        shift = 0

    if directed:
        example_1 = nx.DiGraph()
    else:
        example_1 = nx.Graph()

    # graph creation
    example_1.add_edge(1, 2, **{EWL: 3 + shift})
    example_1.add_edge(1, 4, **{EWL: 2 + shift})
    example_1.add_edge(2, 3, **{EWL: 4 + shift})
    example_1.add_edge(2, 5, **{EWL: 6 + shift})

    # node weights
    if explicit_node_wt:
        nx.set_node_attributes(example_1, 1, NWL)
        wtu = NWL
    else:
        wtu = None

    # partitioning
    clusters_1 = {
        frozenset(x)
        for x in nx.community.lukes_partitioning(
            example_1, limit, node_weight=wtu, edge_weight=EWL
        )
    }

    return clusters_1


# second test from the Lukes original paper
def paper_2_case(explicit_edge_wt=True, directed=False):
    # problem specific constants
    byte_block_size = 32

    # configuration
    if directed:
        example_2 = nx.DiGraph()
    else:
        example_2 = nx.Graph()

    if explicit_edge_wt:
        edic = {EWL: 1}
        wtu = EWL
    else:
        edic = {}
        wtu = None

    # graph creation
    example_2.add_edge("name", "home_address", **edic)
    example_2.add_edge("name", "education", **edic)
    example_2.add_edge("education", "bs", **edic)
    example_2.add_edge("education", "ms", **edic)
    example_2.add_edge("education", "phd", **edic)
    example_2.add_edge("name", "telephone", **edic)
    example_2.add_edge("telephone", "home", **edic)
    example_2.add_edge("telephone", "office", **edic)
    example_2.add_edge("office", "no1", **edic)
    example_2.add_edge("office", "no2", **edic)

    example_2.nodes["name"][NWL] = 20
    example_2.nodes["education"][NWL] = 10
    example_2.nodes["bs"][NWL] = 1
    example_2.nodes["ms"][NWL] = 1
    example_2.nodes["phd"][NWL] = 1
    example_2.nodes["home_address"][NWL] = 8
    example_2.nodes["telephone"][NWL] = 8
    example_2.nodes["home"][NWL] = 8
    example_2.nodes["office"][NWL] = 4
    example_2.nodes["no1"][NWL] = 1
    example_2.nodes["no2"][NWL] = 1

    # partitioning
    clusters_2 = {
        frozenset(x)
        for x in nx.community.lukes_partitioning(
            example_2, byte_block_size, node_weight=NWL, edge_weight=wtu
        )
    }

    return clusters_2


def test_paper_1_case():
    ground_truth = {frozenset([1, 4]), frozenset([2, 3, 5])}

    tf = (True, False)
    for flt, nwt, drc in product(tf, tf, tf):
        part = paper_1_case(flt, nwt, drc)
        assert part == ground_truth


def test_paper_2_case():
    ground_truth = {
        frozenset(["education", "bs", "ms", "phd"]),
        frozenset(["name", "home_address"]),
        frozenset(["telephone", "home", "office", "no1", "no2"]),
    }

    tf = (True, False)
    for ewt, drc in product(tf, tf):
        part = paper_2_case(ewt, drc)
        assert part == ground_truth


def test_mandatory_tree():
    not_a_tree = nx.complete_graph(4)

    with pytest.raises(nx.NotATree):
        nx.community.lukes_partitioning(not_a_tree, 5)


def test_mandatory_integrality():
    byte_block_size = 32

    ex_1_broken = nx.DiGraph()

    ex_1_broken.add_edge(1, 2, **{EWL: 3.2})
    ex_1_broken.add_edge(1, 4, **{EWL: 2.4})
    ex_1_broken.add_edge(2, 3, **{EWL: 4.0})
    ex_1_broken.add_edge(2, 5, **{EWL: 6.3})

    ex_1_broken.nodes[1][NWL] = 1.2  # !
    ex_1_broken.nodes[2][NWL] = 1
    ex_1_broken.nodes[3][NWL] = 1
    ex_1_broken.nodes[4][NWL] = 1
    ex_1_broken.nodes[5][NWL] = 2

    with pytest.raises(TypeError):
        nx.community.lukes_partitioning(
            ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL
        )
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_modularity_max.py
ADDED
@@ -0,0 +1,340 @@
import pytest

import networkx as nx
from networkx.algorithms.community import (
    greedy_modularity_communities,
    naive_greedy_modularity_communities,
)


@pytest.mark.parametrize(
    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
)
def test_modularity_communities(func):
    G = nx.karate_club_graph()
    john_a = frozenset(
        [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
    )
    mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19])
    overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21])
    expected = {john_a, overlap, mr_hi}
    assert set(func(G, weight=None)) == expected


@pytest.mark.parametrize(
    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
)
def test_modularity_communities_categorical_labels(func):
    # Using other than 0-starting contiguous integers as node-labels.
    G = nx.Graph(
        [
            ("a", "b"),
            ("a", "c"),
            ("b", "c"),
            ("b", "d"),  # inter-community edge
            ("d", "e"),
            ("d", "f"),
            ("d", "g"),
            ("f", "g"),
            ("d", "e"),
            ("f", "e"),
        ]
    )
    expected = {frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})}
    assert set(func(G)) == expected


def test_greedy_modularity_communities_components():
    # Test for gh-5530
    G = nx.Graph([(0, 1), (2, 3), (4, 5), (5, 6)])
    # usual case with 3 components
    assert greedy_modularity_communities(G) == [{4, 5, 6}, {0, 1}, {2, 3}]
    # best_n can make the algorithm continue even when modularity goes down
    assert greedy_modularity_communities(G, best_n=3) == [{4, 5, 6}, {0, 1}, {2, 3}]
    assert greedy_modularity_communities(G, best_n=2) == [{0, 1, 4, 5, 6}, {2, 3}]
    assert greedy_modularity_communities(G, best_n=1) == [{0, 1, 2, 3, 4, 5, 6}]


def test_greedy_modularity_communities_relabeled():
    # Test for gh-4966
    G = nx.balanced_tree(2, 2)
    mapping = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 7: "h"}
    G = nx.relabel_nodes(G, mapping)
    expected = [frozenset({"e", "d", "a", "b"}), frozenset({"c", "f", "g"})]
    assert greedy_modularity_communities(G) == expected


def test_greedy_modularity_communities_directed():
    G = nx.DiGraph(
        [
            ("a", "b"),
            ("a", "c"),
            ("b", "c"),
            ("b", "d"),  # inter-community edge
            ("d", "e"),
            ("d", "f"),
            ("d", "g"),
            ("f", "g"),
            ("d", "e"),
            ("f", "e"),
        ]
    )
    expected = [frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})]
    assert greedy_modularity_communities(G) == expected

    # with loops
    G = nx.DiGraph()
    G.add_edges_from(
        [(1, 1), (1, 2), (1, 3), (2, 3), (1, 4), (4, 4), (5, 5), (4, 5), (4, 6), (5, 6)]
    )
    expected = [frozenset({1, 2, 3}), frozenset({4, 5, 6})]
    assert greedy_modularity_communities(G) == expected


@pytest.mark.parametrize(
    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
)
def test_modularity_communities_weighted(func):
    G = nx.balanced_tree(2, 3)
    for a, b in G.edges:
        if ((a == 1) or (a == 2)) and (b != 0):
            G[a][b]["weight"] = 10.0
        else:
            G[a][b]["weight"] = 1.0

    expected = [{0, 1, 3, 4, 7, 8, 9, 10}, {2, 5, 6, 11, 12, 13, 14}]

    assert func(G, weight="weight") == expected
    assert func(G, weight="weight", resolution=0.9) == expected
    assert func(G, weight="weight", resolution=0.3) == expected
    assert func(G, weight="weight", resolution=1.1) != expected


def test_modularity_communities_floating_point():
    # check for floating point error when used as key in the mapped_queue dict.
    # Test for gh-4992 and gh-5000
    G = nx.Graph()
    G.add_weighted_edges_from(
        [(0, 1, 12), (1, 4, 71), (2, 3, 15), (2, 4, 10), (3, 6, 13)]
    )
    expected = [{0, 1, 4}, {2, 3, 6}]
    assert greedy_modularity_communities(G, weight="weight") == expected
    assert (
        greedy_modularity_communities(G, weight="weight", resolution=0.99) == expected
    )


def test_modularity_communities_directed_weighted():
    G = nx.DiGraph()
    G.add_weighted_edges_from(
        [
            (1, 2, 5),
            (1, 3, 3),
            (2, 3, 6),
            (2, 6, 1),
            (1, 4, 1),
            (4, 5, 3),
            (4, 6, 7),
            (5, 6, 2),
            (5, 7, 5),
            (5, 8, 4),
            (6, 8, 3),
        ]
    )
    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
    assert greedy_modularity_communities(G, weight="weight") == expected

    # A large weight of the edge (2, 6) causes 6 to change group, even if it shares
    # only one connection with the new group and 3 with the old one.
    G[2][6]["weight"] = 20
    expected = [frozenset({1, 2, 3, 6}), frozenset({4, 5, 7, 8})]
    assert greedy_modularity_communities(G, weight="weight") == expected


def test_greedy_modularity_communities_multigraph():
    G = nx.MultiGraph()
    G.add_edges_from(
        [
            (1, 2),
            (1, 2),
            (1, 3),
            (2, 3),
            (1, 4),
            (2, 4),
            (4, 5),
            (5, 6),
            (5, 7),
            (5, 7),
            (6, 7),
            (7, 8),
            (5, 8),
        ]
    )
    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
    assert greedy_modularity_communities(G) == expected

    # Converting (4, 5) into a multi-edge causes node 4 to change group.
    G.add_edge(4, 5)
    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
    assert greedy_modularity_communities(G) == expected


def test_greedy_modularity_communities_multigraph_weighted():
    G = nx.MultiGraph()
    G.add_weighted_edges_from(
        [
            (1, 2, 5),
            (1, 2, 3),
            (1, 3, 6),
            (1, 3, 6),
            (2, 3, 4),
            (1, 4, 1),
            (1, 4, 1),
            (2, 4, 3),
            (2, 4, 3),
            (4, 5, 1),
            (5, 6, 3),
            (5, 6, 7),
            (5, 6, 4),
            (5, 7, 9),
            (5, 7, 9),
            (6, 7, 8),
            (7, 8, 2),
            (7, 8, 2),
            (5, 8, 6),
            (5, 8, 6),
        ]
    )
    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
    assert greedy_modularity_communities(G, weight="weight") == expected

    # Adding multi-edge (4, 5, 16) causes node 4 to change group.
    G.add_edge(4, 5, weight=16)
    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
    assert greedy_modularity_communities(G, weight="weight") == expected

    # Increasing the weight of edge (1, 4) causes node 4 to return to the former group.
    G[1][4][1]["weight"] = 3
    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
    assert greedy_modularity_communities(G, weight="weight") == expected


def test_greed_modularity_communities_multidigraph():
    G = nx.MultiDiGraph()
    G.add_edges_from(
        [
            (1, 2),
            (1, 2),
            (3, 1),
            (2, 3),
            (2, 3),
            (3, 2),
            (1, 4),
            (2, 4),
            (4, 2),
            (4, 5),
            (5, 6),
            (5, 6),
            (6, 5),
            (5, 7),
            (6, 7),
            (7, 8),
            (5, 8),
            (8, 4),
        ]
    )
    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
    assert greedy_modularity_communities(G, weight="weight") == expected


def test_greed_modularity_communities_multidigraph_weighted():
    G = nx.MultiDiGraph()
    G.add_weighted_edges_from(
        [
            (1, 2, 5),
            (1, 2, 3),
            (3, 1, 6),
            (1, 3, 6),
            (3, 2, 4),
            (1, 4, 2),
            (1, 4, 5),
            (2, 4, 3),
            (3, 2, 8),
            (4, 2, 3),
            (4, 3, 5),
            (4, 5, 2),
            (5, 6, 3),
            (5, 6, 7),
            (6, 5, 4),
            (5, 7, 9),
            (5, 7, 9),
            (7, 6, 8),
            (7, 8, 2),
            (8, 7, 2),
            (5, 8, 6),
            (5, 8, 6),
        ]
    )
    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
    assert greedy_modularity_communities(G, weight="weight") == expected


def test_resolution_parameter_impact():
    G = nx.barbell_graph(5, 3)

    gamma = 1
    expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
    assert greedy_modularity_communities(G, resolution=gamma) == expected
    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected

    gamma = 2.5
    expected = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}]
    assert greedy_modularity_communities(G, resolution=gamma) == expected
    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected

    gamma = 0.3
    expected = [frozenset(range(8)), frozenset(range(8, 13))]
    assert greedy_modularity_communities(G, resolution=gamma) == expected
    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected


def test_cutoff_parameter():
    G = nx.circular_ladder_graph(4)

    # No aggregation:
    expected = [{k} for k in range(8)]
    assert greedy_modularity_communities(G, cutoff=8) == expected

    # Aggregation to half order (number of nodes)
    expected = [{k, k + 1} for k in range(0, 8, 2)]
    assert greedy_modularity_communities(G, cutoff=4) == expected

    # Default aggregation case (here, 2 communities emerge)
    expected = [frozenset(range(4)), frozenset(range(4, 8))]
    assert greedy_modularity_communities(G, cutoff=1) == expected


def test_best_n():
    G = nx.barbell_graph(5, 3)

    # Same result as without enforcing cutoff:
    best_n = 3
    expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
    assert greedy_modularity_communities(G, best_n=best_n) == expected

    # One additional merging step:
    best_n = 2
    expected = [frozenset(range(8)), frozenset(range(8, 13))]
    assert greedy_modularity_communities(G, best_n=best_n) == expected

    # Two additional merging steps:
    best_n = 1
    expected = [frozenset(range(13))]
    assert greedy_modularity_communities(G, best_n=best_n) == expected


def test_greedy_modularity_communities_corner_cases():
    G = nx.empty_graph()
    assert nx.community.greedy_modularity_communities(G) == []
    G.add_nodes_from(range(3))
    assert nx.community.greedy_modularity_communities(G) == [{0}, {1}, {2}]
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_quality.py
ADDED
@@ -0,0 +1,138 @@
"""Unit tests for the :mod:`networkx.algorithms.community.quality`
module.

"""
import pytest

import networkx as nx
from networkx import barbell_graph
from networkx.algorithms.community import modularity, partition_quality
from networkx.algorithms.community.quality import inter_community_edges


class TestPerformance:
    """Unit tests for the :func:`performance` function."""

    def test_bad_partition(self):
        """Tests that a poor partition has a low performance measure."""
        G = barbell_graph(3, 0)
        partition = [{0, 1, 4}, {2, 3, 5}]
        assert 8 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)

    def test_good_partition(self):
        """Tests that a good partition has a high performance measure."""
        G = barbell_graph(3, 0)
        partition = [{0, 1, 2}, {3, 4, 5}]
        assert 14 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)


class TestCoverage:
    """Unit tests for the :func:`coverage` function."""

    def test_bad_partition(self):
        """Tests that a poor partition has a low coverage measure."""
        G = barbell_graph(3, 0)
        partition = [{0, 1, 4}, {2, 3, 5}]
        assert 3 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)

    def test_good_partition(self):
        """Tests that a good partition has a high coverage measure."""
        G = barbell_graph(3, 0)
        partition = [{0, 1, 2}, {3, 4, 5}]
        assert 6 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)


def test_modularity():
    G = nx.barbell_graph(3, 0)
    C = [{0, 1, 4}, {2, 3, 5}]
    assert (-16 / (14**2)) == pytest.approx(modularity(G, C), abs=1e-7)
    C = [{0, 1, 2}, {3, 4, 5}]
    assert (35 * 2) / (14**2) == pytest.approx(modularity(G, C), abs=1e-7)

    n = 1000
    G = nx.erdos_renyi_graph(n, 0.09, seed=42, directed=True)
    C = [set(range(n // 2)), set(range(n // 2, n))]
    assert 0.00017154251389292754 == pytest.approx(modularity(G, C), abs=1e-7)

    G = nx.margulis_gabber_galil_graph(10)
    mid_value = G.number_of_nodes() // 2
    nodes = list(G.nodes)
    C = [set(nodes[:mid_value]), set(nodes[mid_value:])]
    assert 0.13 == pytest.approx(modularity(G, C), abs=1e-7)

    G = nx.DiGraph()
    G.add_edges_from([(2, 1), (2, 3), (3, 4)])
    C = [{1, 2}, {3, 4}]
    assert 2 / 9 == pytest.approx(modularity(G, C), abs=1e-7)


def test_modularity_resolution():
    G = nx.barbell_graph(3, 0)
    C = [{0, 1, 4}, {2, 3, 5}]
    assert modularity(G, C) == pytest.approx(3 / 7 - 100 / 14**2)
    gamma = 2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2)
    gamma = 0.2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2)

    C = [{0, 1, 2}, {3, 4, 5}]
    assert modularity(G, C) == pytest.approx(6 / 7 - 98 / 14**2)
    gamma = 2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2)
    gamma = 0.2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2)

    G = nx.barbell_graph(5, 3)
    C = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
    gamma = 1
    result = modularity(G, C, resolution=gamma)
    # This C is maximal for gamma=1: modularity = 0.518229
    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))
    gamma = 2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))
    gamma = 0.2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))

    C = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}]
    gamma = 1
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))
    gamma = 2.5
    result = modularity(G, C, resolution=gamma)
    # This C is maximal for gamma=2.5: modularity = -0.06553819
    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))
    gamma = 0.2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))

    C = [frozenset(range(8)), frozenset(range(8, 13))]
    gamma = 1
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))
    gamma = 2
    result = modularity(G, C, resolution=gamma)
    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))
    gamma = 0.3
    result = modularity(G, C, resolution=gamma)
    # This C is maximal for gamma=0.3: modularity = 0.805990
    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))


def test_inter_community_edges_with_digraphs():
    G = nx.complete_graph(2, create_using=nx.DiGraph())
    partition = [{0}, {1}]
    assert inter_community_edges(G, partition) == 2

    G = nx.complete_graph(10, create_using=nx.DiGraph())
    partition = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}]
    assert inter_community_edges(G, partition) == 70

    G = nx.cycle_graph(4, create_using=nx.DiGraph())
    partition = [{0, 1}, {2, 3}]
    assert inter_community_edges(G, partition) == 2
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_utils.py
ADDED
@@ -0,0 +1,28 @@
"""Unit tests for the :mod:`networkx.algorithms.community.utils` module.

"""

import networkx as nx


def test_is_partition():
    G = nx.empty_graph(3)
    assert nx.community.is_partition(G, [{0, 1}, {2}])
    assert nx.community.is_partition(G, ({0, 1}, {2}))
    assert nx.community.is_partition(G, ([0, 1], [2]))
    assert nx.community.is_partition(G, [[0, 1], [2]])


def test_not_covering():
    G = nx.empty_graph(3)
    assert not nx.community.is_partition(G, [{0}, {1}])


def test_not_disjoint():
    G = nx.empty_graph(3)
    assert not nx.community.is_partition(G, [{0, 1}, {1, 2}])


def test_not_node():
    G = nx.empty_graph(3)
    assert not nx.community.is_partition(G, [{0, 1}, {3}])
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cf8f81ceb5eaaee1621aa60b892d83e596a6173f6f6517359b679ff3daa1b0f8
size 44623
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6f79f0e90fa4c51ec79165f15963e1ed89477576e06bcaa67ae622c260411931
size 42248
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ccacba1e0fbfb30bec361f0e48ec88c999d3474fcda5ddf93bd444ace17cfa0e
|
3 |
+
size 88132
|
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-310.pyc
ADDED
Binary file (13.4 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
from networkx.algorithms.operators.all import *
|
2 |
+
from networkx.algorithms.operators.binary import *
|
3 |
+
from networkx.algorithms.operators.product import *
|
4 |
+
from networkx.algorithms.operators.unary import *
|
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/all.py
ADDED
@@ -0,0 +1,321 @@
1 |
+
"""Operations on many graphs.
|
2 |
+
"""
|
3 |
+
from itertools import chain, repeat
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
|
7 |
+
__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"]
|
8 |
+
|
9 |
+
|
10 |
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
|
11 |
+
def union_all(graphs, rename=()):
|
12 |
+
"""Returns the union of all graphs.
|
13 |
+
|
14 |
+
The graphs must be disjoint, otherwise an exception is raised.
|
15 |
+
|
16 |
+
Parameters
|
17 |
+
----------
|
18 |
+
graphs : iterable
|
19 |
+
Iterable of NetworkX graphs
|
20 |
+
|
21 |
+
rename : iterable , optional
|
22 |
+
Node names of graphs can be changed by specifying the tuple
|
23 |
+
rename=('G-','H-') (for example). Node "u" in G is then renamed
|
24 |
+
"G-u" and "v" in H is renamed "H-v". Infinite generators (like itertools.count)
|
25 |
+
are also supported.
|
26 |
+
|
27 |
+
Returns
|
28 |
+
-------
|
29 |
+
U : a graph with the same type as the first graph in list
|
30 |
+
|
31 |
+
Raises
|
32 |
+
------
|
33 |
+
ValueError
|
34 |
+
If `graphs` is an empty list.
|
35 |
+
|
36 |
+
NetworkXError
|
37 |
+
In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
|
38 |
+
|
39 |
+
Notes
|
40 |
+
-----
|
41 |
+
To operate on graphs of mixed type, first convert them to the same type.
|
42 |
+
>>> G = nx.Graph()
|
43 |
+
>>> H = nx.DiGraph()
|
44 |
+
>>> GH = union_all([nx.DiGraph(G), H])
|
45 |
+
|
46 |
+
To force a disjoint union with node relabeling, use
|
47 |
+
disjoint_union_all([G, H]) or convert_node_labels_to_integers().
|
48 |
+
|
49 |
+
Graph, edge, and node attributes are propagated to the union graph.
|
50 |
+
If a graph attribute is present in multiple graphs, then the value
|
51 |
+
from the last graph in the list with that attribute is used.
|
52 |
+
|
53 |
+
Examples
|
54 |
+
--------
|
55 |
+
>>> G1 = nx.Graph([(1, 2), (2, 3)])
|
56 |
+
>>> G2 = nx.Graph([(4, 5), (5, 6)])
|
57 |
+
>>> result_graph = nx.union_all([G1, G2])
|
58 |
+
>>> result_graph.nodes()
|
59 |
+
NodeView((1, 2, 3, 4, 5, 6))
|
60 |
+
>>> result_graph.edges()
|
61 |
+
EdgeView([(1, 2), (2, 3), (4, 5), (5, 6)])
|
62 |
+
|
63 |
+
See Also
|
64 |
+
--------
|
65 |
+
union
|
66 |
+
disjoint_union_all
|
67 |
+
"""
|
68 |
+
R = None
|
69 |
+
seen_nodes = set()
|
70 |
+
|
71 |
+
# rename graph to obtain disjoint node labels
|
72 |
+
def add_prefix(graph, prefix):
|
73 |
+
if prefix is None:
|
74 |
+
return graph
|
75 |
+
|
76 |
+
def label(x):
|
77 |
+
return f"{prefix}{x}"
|
78 |
+
|
79 |
+
return nx.relabel_nodes(graph, label)
|
80 |
+
|
81 |
+
rename = chain(rename, repeat(None))
|
82 |
+
graphs = (add_prefix(G, name) for G, name in zip(graphs, rename))
|
83 |
+
|
84 |
+
for i, G in enumerate(graphs):
|
85 |
+
G_nodes_set = set(G.nodes)
|
86 |
+
if i == 0:
|
87 |
+
# Union is the same type as first graph
|
88 |
+
R = G.__class__()
|
89 |
+
elif G.is_directed() != R.is_directed():
|
90 |
+
raise nx.NetworkXError("All graphs must be directed or undirected.")
|
91 |
+
elif G.is_multigraph() != R.is_multigraph():
|
92 |
+
raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
|
93 |
+
elif not seen_nodes.isdisjoint(G_nodes_set):
|
94 |
+
raise nx.NetworkXError(
|
95 |
+
"The node sets of the graphs are not disjoint.\n"
|
96 |
+
"Use `rename` to specify prefixes for the graphs or use\n"
|
97 |
+
"disjoint_union(G1, G2, ..., GN)."
|
98 |
+
)
|
99 |
+
|
100 |
+
seen_nodes |= G_nodes_set
|
101 |
+
R.graph.update(G.graph)
|
102 |
+
R.add_nodes_from(G.nodes(data=True))
|
103 |
+
R.add_edges_from(
|
104 |
+
G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
|
105 |
+
)
|
106 |
+
|
107 |
+
if R is None:
|
108 |
+
raise ValueError("cannot apply union_all to an empty list")
|
109 |
+
|
110 |
+
return R
|
111 |
+
|
112 |
+
|
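A minimal usage sketch (not part of the file above) of the rename behaviour described in the docstring, including the infinite prefix generator it mentions; graphs are chosen only for illustration:

import itertools
import networkx as nx

G1 = nx.path_graph(2)   # nodes 0, 1
G2 = nx.path_graph(2)   # same labels, so a plain union_all would raise

# Explicit prefixes keep the node sets disjoint.
U = nx.union_all([G1, G2], rename=("a-", "b-"))
assert sorted(U.nodes) == ["a-0", "a-1", "b-0", "b-1"]

# An infinite generator of prefixes also works.
prefixes = (f"{i}-" for i in itertools.count())
U2 = nx.union_all([G1, G2], rename=prefixes)
assert sorted(U2.nodes) == ["0-0", "0-1", "1-0", "1-1"]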
113 |
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
|
114 |
+
def disjoint_union_all(graphs):
|
115 |
+
"""Returns the disjoint union of all graphs.
|
116 |
+
|
117 |
+
This operation forces distinct integer node labels starting with 0
|
118 |
+
for the first graph in the list and numbering consecutively.
|
119 |
+
|
120 |
+
Parameters
|
121 |
+
----------
|
122 |
+
graphs : iterable
|
123 |
+
Iterable of NetworkX graphs
|
124 |
+
|
125 |
+
Returns
|
126 |
+
-------
|
127 |
+
U : A graph with the same type as the first graph in list
|
128 |
+
|
129 |
+
Raises
|
130 |
+
------
|
131 |
+
ValueError
|
132 |
+
If `graphs` is an empty list.
|
133 |
+
|
134 |
+
NetworkXError
|
135 |
+
In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
|
136 |
+
|
137 |
+
Examples
|
138 |
+
--------
|
139 |
+
>>> G1 = nx.Graph([(1, 2), (2, 3)])
|
140 |
+
>>> G2 = nx.Graph([(4, 5), (5, 6)])
|
141 |
+
>>> U = nx.disjoint_union_all([G1, G2])
|
142 |
+
>>> list(U.nodes())
|
143 |
+
[0, 1, 2, 3, 4, 5]
|
144 |
+
>>> list(U.edges())
|
145 |
+
[(0, 1), (1, 2), (3, 4), (4, 5)]
|
146 |
+
|
147 |
+
Notes
|
148 |
+
-----
|
149 |
+
To operate on graphs of mixed type, first convert them to the same type.
|
150 |
+
|
151 |
+
Graph, edge, and node attributes are propagated to the union graph.
|
152 |
+
If a graph attribute is present in multiple graphs, then the value
|
153 |
+
from the last graph in the list with that attribute is used.
|
154 |
+
"""
|
155 |
+
|
156 |
+
def yield_relabeled(graphs):
|
157 |
+
first_label = 0
|
158 |
+
for G in graphs:
|
159 |
+
yield nx.convert_node_labels_to_integers(G, first_label=first_label)
|
160 |
+
first_label += len(G)
|
161 |
+
|
162 |
+
R = union_all(yield_relabeled(graphs))
|
163 |
+
|
164 |
+
return R
|
165 |
+
|
166 |
+
|
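A small sketch (not part of the file) of the consecutive integer relabelling described above; graphs are chosen only for illustration:

import networkx as nx

A = nx.path_graph(3)   # relabelled to 0, 1, 2
B = nx.path_graph(2)   # relabelled to 3, 4
C = nx.path_graph(2)   # relabelled to 5, 6

U = nx.disjoint_union_all([A, B, C])
assert list(U.nodes) == list(range(7))
assert U.number_of_edges() == 2 + 1 + 1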
167 |
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
|
168 |
+
def compose_all(graphs):
|
169 |
+
"""Returns the composition of all graphs.
|
170 |
+
|
171 |
+
Composition is the simple union of the node sets and edge sets.
|
172 |
+
The node sets of the supplied graphs need not be disjoint.
|
173 |
+
|
174 |
+
Parameters
|
175 |
+
----------
|
176 |
+
graphs : iterable
|
177 |
+
Iterable of NetworkX graphs
|
178 |
+
|
179 |
+
Returns
|
180 |
+
-------
|
181 |
+
C : A graph with the same type as the first graph in list
|
182 |
+
|
183 |
+
Raises
|
184 |
+
------
|
185 |
+
ValueError
|
186 |
+
If `graphs` is an empty list.
|
187 |
+
|
188 |
+
NetworkXError
|
189 |
+
In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
|
190 |
+
|
191 |
+
Examples
|
192 |
+
--------
|
193 |
+
>>> G1 = nx.Graph([(1, 2), (2, 3)])
|
194 |
+
>>> G2 = nx.Graph([(3, 4), (5, 6)])
|
195 |
+
>>> C = nx.compose_all([G1, G2])
|
196 |
+
>>> list(C.nodes())
|
197 |
+
[1, 2, 3, 4, 5, 6]
|
198 |
+
>>> list(C.edges())
|
199 |
+
[(1, 2), (2, 3), (3, 4), (5, 6)]
|
200 |
+
|
201 |
+
Notes
|
202 |
+
-----
|
203 |
+
For operating on mixed type graphs, they should be converted to the same type.
|
204 |
+
|
205 |
+
Graph, edge, and node attributes are propagated to the union graph.
|
206 |
+
If a graph attribute is present in multiple graphs, then the value
|
207 |
+
from the last graph in the list with that attribute is used.
|
208 |
+
"""
|
209 |
+
R = None
|
210 |
+
|
211 |
+
# add graph attributes, H attributes take precedent over G attributes
|
212 |
+
for i, G in enumerate(graphs):
|
213 |
+
if i == 0:
|
214 |
+
# create new graph
|
215 |
+
R = G.__class__()
|
216 |
+
elif G.is_directed() != R.is_directed():
|
217 |
+
raise nx.NetworkXError("All graphs must be directed or undirected.")
|
218 |
+
elif G.is_multigraph() != R.is_multigraph():
|
219 |
+
raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
|
220 |
+
|
221 |
+
R.graph.update(G.graph)
|
222 |
+
R.add_nodes_from(G.nodes(data=True))
|
223 |
+
R.add_edges_from(
|
224 |
+
G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
|
225 |
+
)
|
226 |
+
|
227 |
+
if R is None:
|
228 |
+
raise ValueError("cannot apply compose_all to an empty list")
|
229 |
+
|
230 |
+
return R
|
231 |
+
|
232 |
+
|
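A short sketch (not part of the file) of the "value from the last graph wins" attribute rule stated above:

import networkx as nx

G1 = nx.Graph(day="Monday")
G1.add_node(0, color="red")
G2 = nx.Graph(day="Tuesday")
G2.add_node(0, color="blue")

C = nx.compose_all([G1, G2])
assert C.graph["day"] == "Tuesday"     # graph attribute taken from the last graph
assert C.nodes[0]["color"] == "blue"   # node attribute taken from the last graph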
233 |
+
@nx._dispatchable(graphs="[graphs]", returns_graph=True)
|
234 |
+
def intersection_all(graphs):
|
235 |
+
"""Returns a new graph that contains only the nodes and the edges that exist in
|
236 |
+
all graphs.
|
237 |
+
|
238 |
+
Parameters
|
239 |
+
----------
|
240 |
+
graphs : iterable
|
241 |
+
Iterable of NetworkX graphs
|
242 |
+
|
243 |
+
Returns
|
244 |
+
-------
|
245 |
+
R : A new graph with the same type as the first graph in list
|
246 |
+
|
247 |
+
Raises
|
248 |
+
------
|
249 |
+
ValueError
|
250 |
+
If `graphs` is an empty list.
|
251 |
+
|
252 |
+
NetworkXError
|
253 |
+
In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
|
254 |
+
|
255 |
+
Notes
|
256 |
+
-----
|
257 |
+
To operate on graphs of mixed type, first convert them to the same type.
|
258 |
+
|
259 |
+
Attributes from the graph, nodes, and edges are not copied to the new
|
260 |
+
graph.
|
261 |
+
|
262 |
+
The resulting graph can be updated with attributes if desired. For example, the code below adds to each node the minimum value of a node attribute taken across all of the input graphs.
|
263 |
+
>>> g = nx.Graph()
|
264 |
+
>>> g.add_node(0, capacity=4)
|
265 |
+
>>> g.add_node(1, capacity=3)
|
266 |
+
>>> g.add_edge(0, 1)
|
267 |
+
|
268 |
+
>>> h = g.copy()
|
269 |
+
>>> h.nodes[0]["capacity"] = 2
|
270 |
+
|
271 |
+
>>> gh = nx.intersection_all([g, h])
|
272 |
+
|
273 |
+
>>> new_node_attr = {
|
274 |
+
... n: min(*(anyG.nodes[n].get("capacity", float("inf")) for anyG in [g, h]))
|
275 |
+
... for n in gh
|
276 |
+
... }
|
277 |
+
>>> nx.set_node_attributes(gh, new_node_attr, "new_capacity")
|
278 |
+
>>> gh.nodes(data=True)
|
279 |
+
NodeDataView({0: {'new_capacity': 2}, 1: {'new_capacity': 3}})
|
280 |
+
|
281 |
+
Examples
|
282 |
+
--------
|
283 |
+
>>> G1 = nx.Graph([(1, 2), (2, 3)])
|
284 |
+
>>> G2 = nx.Graph([(2, 3), (3, 4)])
|
285 |
+
>>> R = nx.intersection_all([G1, G2])
|
286 |
+
>>> list(R.nodes())
|
287 |
+
[2, 3]
|
288 |
+
>>> list(R.edges())
|
289 |
+
[(2, 3)]
|
290 |
+
|
291 |
+
"""
|
292 |
+
R = None
|
293 |
+
|
294 |
+
for i, G in enumerate(graphs):
|
295 |
+
G_nodes_set = set(G.nodes)
|
296 |
+
G_edges_set = set(G.edges)
|
297 |
+
if not G.is_directed():
|
298 |
+
if G.is_multigraph():
|
299 |
+
G_edges_set.update((v, u, k) for u, v, k in list(G_edges_set))
|
300 |
+
else:
|
301 |
+
G_edges_set.update((v, u) for u, v in list(G_edges_set))
|
302 |
+
if i == 0:
|
303 |
+
# create new graph
|
304 |
+
R = G.__class__()
|
305 |
+
node_intersection = G_nodes_set
|
306 |
+
edge_intersection = G_edges_set
|
307 |
+
elif G.is_directed() != R.is_directed():
|
308 |
+
raise nx.NetworkXError("All graphs must be directed or undirected.")
|
309 |
+
elif G.is_multigraph() != R.is_multigraph():
|
310 |
+
raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
|
311 |
+
else:
|
312 |
+
node_intersection &= G_nodes_set
|
313 |
+
edge_intersection &= G_edges_set
|
314 |
+
|
315 |
+
if R is None:
|
316 |
+
raise ValueError("cannot apply intersection_all to an empty list")
|
317 |
+
|
318 |
+
R.add_nodes_from(node_intersection)
|
319 |
+
R.add_edges_from(edge_intersection)
|
320 |
+
|
321 |
+
return R
|
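A short sketch (not part of the file, illustrative graphs) of how only the nodes and edges common to every input survive, with attributes dropped:

import networkx as nx
from networkx.utils import edges_equal

G1 = nx.Graph([(1, 2), (2, 3), (3, 4)])
G2 = nx.Graph([(2, 3), (3, 4), (4, 5)])
G3 = nx.Graph([(3, 4), (4, 5), (2, 3)])

R = nx.intersection_all([G1, G2, G3])
assert sorted(R.nodes) == [2, 3, 4]
assert edges_equal(R.edges, [(2, 3), (3, 4)])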
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/binary.py
ADDED
@@ -0,0 +1,448 @@
1 |
+
"""
|
2 |
+
Operations on graphs including union, intersection, difference.
|
3 |
+
"""
|
4 |
+
import networkx as nx
|
5 |
+
|
6 |
+
__all__ = [
|
7 |
+
"union",
|
8 |
+
"compose",
|
9 |
+
"disjoint_union",
|
10 |
+
"intersection",
|
11 |
+
"difference",
|
12 |
+
"symmetric_difference",
|
13 |
+
"full_join",
|
14 |
+
]
|
15 |
+
_G_H = {"G": 0, "H": 1}
|
16 |
+
|
17 |
+
|
18 |
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
|
19 |
+
def union(G, H, rename=()):
|
20 |
+
"""Combine graphs G and H. The names of nodes must be unique.
|
21 |
+
|
22 |
+
A name collision between the graphs will raise an exception.
|
23 |
+
|
24 |
+
A renaming facility is provided to avoid name collisions.
|
25 |
+
|
26 |
+
|
27 |
+
Parameters
|
28 |
+
----------
|
29 |
+
G, H : graph
|
30 |
+
A NetworkX graph
|
31 |
+
|
32 |
+
rename : iterable , optional
|
33 |
+
Node names of G and H can be changed by specifying the tuple
|
34 |
+
rename=('G-','H-') (for example). Node "u" in G is then renamed
|
35 |
+
"G-u" and "v" in H is renamed "H-v".
|
36 |
+
|
37 |
+
Returns
|
38 |
+
-------
|
39 |
+
U : A union graph with the same type as G.
|
40 |
+
|
41 |
+
See Also
|
42 |
+
--------
|
43 |
+
compose
|
44 |
+
:func:`~networkx.Graph.update`
|
45 |
+
disjoint_union
|
46 |
+
|
47 |
+
Notes
|
48 |
+
-----
|
49 |
+
To combine graphs that have common nodes, consider compose(G, H)
|
50 |
+
or the method, Graph.update().
|
51 |
+
|
52 |
+
disjoint_union() is similar to union() except that it avoids name clashes
|
53 |
+
by relabeling the nodes with sequential integers.
|
54 |
+
|
55 |
+
Edge and node attributes are propagated from G and H to the union graph.
|
56 |
+
Graph attributes are also propagated, but if they are present in both G and H,
|
57 |
+
then the value from H is used.
|
58 |
+
|
59 |
+
Examples
|
60 |
+
--------
|
61 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
|
62 |
+
>>> H = nx.Graph([(0, 1), (0, 3), (1, 3), (1, 2)])
|
63 |
+
>>> U = nx.union(G, H, rename=("G", "H"))
|
64 |
+
>>> U.nodes
|
65 |
+
NodeView(('G0', 'G1', 'G2', 'H0', 'H1', 'H3', 'H2'))
|
66 |
+
>>> U.edges
|
67 |
+
EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G1', 'G2'), ('H0', 'H1'), ('H0', 'H3'), ('H1', 'H3'), ('H1', 'H2')])
|
68 |
+
|
69 |
+
|
70 |
+
"""
|
71 |
+
return nx.union_all([G, H], rename)
|
72 |
+
|
73 |
+
|
74 |
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
|
75 |
+
def disjoint_union(G, H):
|
76 |
+
"""Combine graphs G and H. The nodes are assumed to be unique (disjoint).
|
77 |
+
|
78 |
+
This algorithm automatically relabels nodes to avoid name collisions.
|
79 |
+
|
80 |
+
Parameters
|
81 |
+
----------
|
82 |
+
G,H : graph
|
83 |
+
A NetworkX graph
|
84 |
+
|
85 |
+
Returns
|
86 |
+
-------
|
87 |
+
U : A union graph with the same type as G.
|
88 |
+
|
89 |
+
See Also
|
90 |
+
--------
|
91 |
+
union
|
92 |
+
compose
|
93 |
+
:func:`~networkx.Graph.update`
|
94 |
+
|
95 |
+
Notes
|
96 |
+
-----
|
97 |
+
A new graph is created, of the same class as G. It is recommended
|
98 |
+
that G and H be either both directed or both undirected.
|
99 |
+
|
100 |
+
The nodes of G are relabeled 0 to len(G)-1, and the nodes of H are
|
101 |
+
relabeled len(G) to len(G)+len(H)-1.
|
102 |
+
|
103 |
+
Renumbering forces G and H to be disjoint, so no exception is ever raised for a name collision.
|
104 |
+
To preserve the check for common nodes, use union().
|
105 |
+
|
106 |
+
Edge and node attributes are propagated from G and H to the union graph.
|
107 |
+
Graph attributes are also propagated, but if they are present in both G and H,
|
108 |
+
then the value from H is used.
|
109 |
+
|
110 |
+
To combine graphs that have common nodes, consider compose(G, H)
|
111 |
+
or the method, Graph.update().
|
112 |
+
|
113 |
+
Examples
|
114 |
+
--------
|
115 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
|
116 |
+
>>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
|
117 |
+
>>> G.nodes[0]["key1"] = 5
|
118 |
+
>>> H.nodes[0]["key2"] = 10
|
119 |
+
>>> U = nx.disjoint_union(G, H)
|
120 |
+
>>> U.nodes(data=True)
|
121 |
+
NodeDataView({0: {'key1': 5}, 1: {}, 2: {}, 3: {'key2': 10}, 4: {}, 5: {}, 6: {}})
|
122 |
+
>>> U.edges
|
123 |
+
EdgeView([(0, 1), (0, 2), (1, 2), (3, 4), (4, 6), (5, 6)])
|
124 |
+
"""
|
125 |
+
return nx.disjoint_union_all([G, H])
|
126 |
+
|
127 |
+
|
128 |
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
|
129 |
+
def intersection(G, H):
|
130 |
+
"""Returns a new graph that contains only the nodes and the edges that exist in
|
131 |
+
both G and H.
|
132 |
+
|
133 |
+
Parameters
|
134 |
+
----------
|
135 |
+
G,H : graph
|
136 |
+
A NetworkX graph. G and H can have different node sets but must be both graphs or both multigraphs.
|
137 |
+
|
138 |
+
Raises
|
139 |
+
------
|
140 |
+
NetworkXError
|
141 |
+
If one is a MultiGraph and the other one is a graph.
|
142 |
+
|
143 |
+
Returns
|
144 |
+
-------
|
145 |
+
GH : A new graph with the same type as G.
|
146 |
+
|
147 |
+
Notes
|
148 |
+
-----
|
149 |
+
Attributes from the graph, nodes, and edges are not copied to the new
|
150 |
+
graph. If you want a new graph of the intersection of G and H
|
151 |
+
with the attributes (including edge data) from G use remove_nodes_from()
|
152 |
+
as follows
|
153 |
+
|
154 |
+
>>> G = nx.path_graph(3)
|
155 |
+
>>> H = nx.path_graph(5)
|
156 |
+
>>> R = G.copy()
|
157 |
+
>>> R.remove_nodes_from(n for n in G if n not in H)
|
158 |
+
>>> R.remove_edges_from(e for e in G.edges if e not in H.edges)
|
159 |
+
|
160 |
+
Examples
|
161 |
+
--------
|
162 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
|
163 |
+
>>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
|
164 |
+
>>> R = nx.intersection(G, H)
|
165 |
+
>>> R.nodes
|
166 |
+
NodeView((0, 1, 2))
|
167 |
+
>>> R.edges
|
168 |
+
EdgeView([(1, 2)])
|
169 |
+
"""
|
170 |
+
return nx.intersection_all([G, H])
|
171 |
+
|
172 |
+
|
173 |
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
|
174 |
+
def difference(G, H):
|
175 |
+
"""Returns a new graph that contains the edges that exist in G but not in H.
|
176 |
+
|
177 |
+
The node sets of H and G must be the same.
|
178 |
+
|
179 |
+
Parameters
|
180 |
+
----------
|
181 |
+
G,H : graph
|
182 |
+
A NetworkX graph. G and H must have the same node sets.
|
183 |
+
|
184 |
+
Returns
|
185 |
+
-------
|
186 |
+
D : A new graph with the same type as G.
|
187 |
+
|
188 |
+
Notes
|
189 |
+
-----
|
190 |
+
Attributes from the graph, nodes, and edges are not copied to the new
|
191 |
+
graph. If you want a new graph of the difference of G and H with
|
192 |
+
the attributes (including edge data) from G use remove_edges_from()
|
193 |
+
as follows:
|
194 |
+
|
195 |
+
>>> G = nx.path_graph(3)
|
196 |
+
>>> H = nx.path_graph(5)
|
197 |
+
>>> R = G.copy()
|
198 |
+
>>> R.remove_edges_from(e for e in G.edges if e in H.edges)
|
199 |
+
|
200 |
+
Examples
|
201 |
+
--------
|
202 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
|
203 |
+
>>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
|
204 |
+
>>> R = nx.difference(G, H)
|
205 |
+
>>> R.nodes
|
206 |
+
NodeView((0, 1, 2, 3))
|
207 |
+
>>> R.edges
|
208 |
+
EdgeView([(0, 2), (1, 3)])
|
209 |
+
"""
|
210 |
+
# create new graph
|
211 |
+
if not G.is_multigraph() == H.is_multigraph():
|
212 |
+
raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
|
213 |
+
R = nx.create_empty_copy(G, with_data=False)
|
214 |
+
|
215 |
+
if set(G) != set(H):
|
216 |
+
raise nx.NetworkXError("Node sets of graphs not equal")
|
217 |
+
|
218 |
+
if G.is_multigraph():
|
219 |
+
edges = G.edges(keys=True)
|
220 |
+
else:
|
221 |
+
edges = G.edges()
|
222 |
+
for e in edges:
|
223 |
+
if not H.has_edge(*e):
|
224 |
+
R.add_edge(*e)
|
225 |
+
return R
|
226 |
+
|
227 |
+
|
228 |
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
|
229 |
+
def symmetric_difference(G, H):
|
230 |
+
"""Returns new graph with edges that exist in either G or H but not both.
|
231 |
+
|
232 |
+
The node sets of H and G must be the same.
|
233 |
+
|
234 |
+
Parameters
|
235 |
+
----------
|
236 |
+
G,H : graph
|
237 |
+
A NetworkX graph. G and H must have the same node sets.
|
238 |
+
|
239 |
+
Returns
|
240 |
+
-------
|
241 |
+
D : A new graph with the same type as G.
|
242 |
+
|
243 |
+
Notes
|
244 |
+
-----
|
245 |
+
Attributes from the graph, nodes, and edges are not copied to the new
|
246 |
+
graph.
|
247 |
+
|
248 |
+
Examples
|
249 |
+
--------
|
250 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
|
251 |
+
>>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
|
252 |
+
>>> R = nx.symmetric_difference(G, H)
|
253 |
+
>>> R.nodes
|
254 |
+
NodeView((0, 1, 2, 3))
|
255 |
+
>>> R.edges
|
256 |
+
EdgeView([(0, 2), (0, 3), (1, 3)])
|
257 |
+
"""
|
258 |
+
# create new graph
|
259 |
+
if not G.is_multigraph() == H.is_multigraph():
|
260 |
+
raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
|
261 |
+
R = nx.create_empty_copy(G, with_data=False)
|
262 |
+
|
263 |
+
if set(G) != set(H):
|
264 |
+
raise nx.NetworkXError("Node sets of graphs not equal")
|
265 |
+
|
266 |
+
gnodes = set(G) # set of nodes in G
|
267 |
+
hnodes = set(H) # set of nodes in H
|
268 |
+
nodes = gnodes.symmetric_difference(hnodes)
|
269 |
+
R.add_nodes_from(nodes)
|
270 |
+
|
271 |
+
if G.is_multigraph():
|
272 |
+
edges = G.edges(keys=True)
|
273 |
+
else:
|
274 |
+
edges = G.edges()
|
275 |
+
# we could copy the data here but then this function doesn't
|
276 |
+
# match intersection and difference
|
277 |
+
for e in edges:
|
278 |
+
if not H.has_edge(*e):
|
279 |
+
R.add_edge(*e)
|
280 |
+
|
281 |
+
if H.is_multigraph():
|
282 |
+
edges = H.edges(keys=True)
|
283 |
+
else:
|
284 |
+
edges = H.edges()
|
285 |
+
for e in edges:
|
286 |
+
if not G.has_edge(*e):
|
287 |
+
R.add_edge(*e)
|
288 |
+
return R
|
289 |
+
|
290 |
+
|
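The edges of symmetric_difference(G, H) are exactly those of difference(G, H) together with those of difference(H, G). A small sketch (not part of the file), reusing the graphs of the docstring example above:

import networkx as nx
from networkx.utils import edges_equal

G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
H = nx.Graph([(0, 1), (1, 2), (0, 3)])

D_GH = nx.difference(G, H)          # edges in G but not in H
D_HG = nx.difference(H, G)          # edges in H but not in G
S = nx.symmetric_difference(G, H)   # edges in exactly one of the two

assert edges_equal(S.edges, list(D_GH.edges) + list(D_HG.edges))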
291 |
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
|
292 |
+
def compose(G, H):
|
293 |
+
"""Compose graph G with H by combining nodes and edges into a single graph.
|
294 |
+
|
295 |
+
The node sets and edges sets do not need to be disjoint.
|
296 |
+
|
297 |
+
Composing preserves the attributes of nodes and edges.
|
298 |
+
Attribute values from H take precedent over attribute values from G.
|
299 |
+
|
300 |
+
Parameters
|
301 |
+
----------
|
302 |
+
G, H : graph
|
303 |
+
A NetworkX graph
|
304 |
+
|
305 |
+
Returns
|
306 |
+
-------
|
307 |
+
C: A new graph with the same type as G
|
308 |
+
|
309 |
+
See Also
|
310 |
+
--------
|
311 |
+
:func:`~networkx.Graph.update`
|
312 |
+
union
|
313 |
+
disjoint_union
|
314 |
+
|
315 |
+
Notes
|
316 |
+
-----
|
317 |
+
It is recommended that G and H be either both directed or both undirected.
|
318 |
+
|
319 |
+
For MultiGraphs, the edges are identified by incident nodes AND edge-key.
|
320 |
+
This can cause surprises (i.e., edge `(1, 2)` may or may not be the same
|
321 |
+
in two graphs) if you use MultiGraph without keeping track of edge keys.
|
322 |
+
|
323 |
+
If combining the attributes of common nodes is not desired, consider union(),
|
324 |
+
which raises an exception for name collisions.
|
325 |
+
|
326 |
+
Examples
|
327 |
+
--------
|
328 |
+
>>> G = nx.Graph([(0, 1), (0, 2)])
|
329 |
+
>>> H = nx.Graph([(0, 1), (1, 2)])
|
330 |
+
>>> R = nx.compose(G, H)
|
331 |
+
>>> R.nodes
|
332 |
+
NodeView((0, 1, 2))
|
333 |
+
>>> R.edges
|
334 |
+
EdgeView([(0, 1), (0, 2), (1, 2)])
|
335 |
+
|
336 |
+
By default, the attributes from `H` take precedent over attributes from `G`.
|
337 |
+
If you prefer another way of combining attributes, you can update them after the compose operation:
|
338 |
+
|
339 |
+
>>> G = nx.Graph([(0, 1, {"weight": 2.0}), (3, 0, {"weight": 100.0})])
|
340 |
+
>>> H = nx.Graph([(0, 1, {"weight": 10.0}), (1, 2, {"weight": -1.0})])
|
341 |
+
>>> nx.set_node_attributes(G, {0: "dark", 1: "light", 3: "black"}, name="color")
|
342 |
+
>>> nx.set_node_attributes(H, {0: "green", 1: "orange", 2: "yellow"}, name="color")
|
343 |
+
>>> GcomposeH = nx.compose(G, H)
|
344 |
+
|
345 |
+
Normally, color attribute values of nodes of GcomposeH come from H. We can workaround this as follows:
|
346 |
+
|
347 |
+
>>> node_data = {
|
348 |
+
... n: G.nodes[n]["color"] + " " + H.nodes[n]["color"] for n in G.nodes & H.nodes
|
349 |
+
... }
|
350 |
+
>>> nx.set_node_attributes(GcomposeH, node_data, "color")
|
351 |
+
>>> print(GcomposeH.nodes[0]["color"])
|
352 |
+
dark green
|
353 |
+
|
354 |
+
>>> print(GcomposeH.nodes[3]["color"])
|
355 |
+
black
|
356 |
+
|
357 |
+
Similarly, we can update edge attributes after the compose operation in a way we prefer:
|
358 |
+
|
359 |
+
>>> edge_data = {
|
360 |
+
... e: G.edges[e]["weight"] * H.edges[e]["weight"] for e in G.edges & H.edges
|
361 |
+
... }
|
362 |
+
>>> nx.set_edge_attributes(GcomposeH, edge_data, "weight")
|
363 |
+
>>> print(GcomposeH.edges[(0, 1)]["weight"])
|
364 |
+
20.0
|
365 |
+
|
366 |
+
>>> print(GcomposeH.edges[(3, 0)]["weight"])
|
367 |
+
100.0
|
368 |
+
"""
|
369 |
+
return nx.compose_all([G, H])
|
370 |
+
|
371 |
+
|
372 |
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
|
373 |
+
def full_join(G, H, rename=(None, None)):
|
374 |
+
"""Returns the full join of graphs G and H.
|
375 |
+
|
376 |
+
Full join is the union of G and H in which all edges between
|
377 |
+
G and H are added.
|
378 |
+
The node sets of G and H must be disjoint,
|
379 |
+
otherwise an exception is raised.
|
380 |
+
|
381 |
+
Parameters
|
382 |
+
----------
|
383 |
+
G, H : graph
|
384 |
+
A NetworkX graph
|
385 |
+
|
386 |
+
rename : tuple , default=(None, None)
|
387 |
+
Node names of G and H can be changed by specifying the tuple
|
388 |
+
rename=('G-','H-') (for example). Node "u" in G is then renamed
|
389 |
+
"G-u" and "v" in H is renamed "H-v".
|
390 |
+
|
391 |
+
Returns
|
392 |
+
-------
|
393 |
+
U : The full join graph with the same type as G.
|
394 |
+
|
395 |
+
Notes
|
396 |
+
-----
|
397 |
+
It is recommended that G and H be either both directed or both undirected.
|
398 |
+
|
399 |
+
If G is directed, then edges from G to H are added as well as from H to G.
|
400 |
+
|
401 |
+
Note that full_join() does not produce parallel edges for MultiGraphs.
|
402 |
+
|
403 |
+
The full join operation of graphs G and H is the same as getting
|
404 |
+
their complement, performing a disjoint union, and finally getting
|
405 |
+
the complement of the resulting graph.
|
406 |
+
|
407 |
+
Graph, edge, and node attributes are propagated from G and H
|
408 |
+
to the union graph. If a graph attribute is present in both
|
409 |
+
G and H the value from H is used.
|
410 |
+
|
411 |
+
Examples
|
412 |
+
--------
|
413 |
+
>>> G = nx.Graph([(0, 1), (0, 2)])
|
414 |
+
>>> H = nx.Graph([(3, 4)])
|
415 |
+
>>> R = nx.full_join(G, H, rename=("G", "H"))
|
416 |
+
>>> R.nodes
|
417 |
+
NodeView(('G0', 'G1', 'G2', 'H3', 'H4'))
|
418 |
+
>>> R.edges
|
419 |
+
EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G0', 'H3'), ('G0', 'H4'), ('G1', 'H3'), ('G1', 'H4'), ('G2', 'H3'), ('G2', 'H4'), ('H3', 'H4')])
|
420 |
+
|
421 |
+
See Also
|
422 |
+
--------
|
423 |
+
union
|
424 |
+
disjoint_union
|
425 |
+
"""
|
426 |
+
R = union(G, H, rename)
|
427 |
+
|
428 |
+
def add_prefix(graph, prefix):
|
429 |
+
if prefix is None:
|
430 |
+
return graph
|
431 |
+
|
432 |
+
def label(x):
|
433 |
+
return f"{prefix}{x}"
|
434 |
+
|
435 |
+
return nx.relabel_nodes(graph, label)
|
436 |
+
|
437 |
+
G = add_prefix(G, rename[0])
|
438 |
+
H = add_prefix(H, rename[1])
|
439 |
+
|
440 |
+
for i in G:
|
441 |
+
for j in H:
|
442 |
+
R.add_edge(i, j)
|
443 |
+
if R.is_directed():
|
444 |
+
for i in H:
|
445 |
+
for j in G:
|
446 |
+
R.add_edge(i, j)
|
447 |
+
|
448 |
+
return R
|
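The Notes above describe full_join as taking complements, forming a disjoint union, and complementing the result. A sketch (not part of the file) checking that identity up to isomorphism; isomorphism is used because the two constructions label nodes differently, and the graphs are assumed examples:

import networkx as nx

G = nx.path_graph(3)
H = nx.cycle_graph(4)

J = nx.full_join(G, H, rename=("G", "H"))
K = nx.complement(nx.disjoint_union(nx.complement(G), nx.complement(H)))

assert nx.is_isomorphic(J, K)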
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/product.py
ADDED
@@ -0,0 +1,630 @@
1 |
+
"""
|
2 |
+
Graph products.
|
3 |
+
"""
|
4 |
+
from itertools import product
|
5 |
+
|
6 |
+
import networkx as nx
|
7 |
+
from networkx.utils import not_implemented_for
|
8 |
+
|
9 |
+
__all__ = [
|
10 |
+
"tensor_product",
|
11 |
+
"cartesian_product",
|
12 |
+
"lexicographic_product",
|
13 |
+
"strong_product",
|
14 |
+
"power",
|
15 |
+
"rooted_product",
|
16 |
+
"corona_product",
|
17 |
+
"modular_product",
|
18 |
+
]
|
19 |
+
_G_H = {"G": 0, "H": 1}
|
20 |
+
|
21 |
+
|
22 |
+
def _dict_product(d1, d2):
|
23 |
+
return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)}
|
24 |
+
|
25 |
+
|
26 |
+
# Generators for producing graph products
|
27 |
+
def _node_product(G, H):
|
28 |
+
for u, v in product(G, H):
|
29 |
+
yield ((u, v), _dict_product(G.nodes[u], H.nodes[v]))
|
30 |
+
|
31 |
+
|
32 |
+
def _directed_edges_cross_edges(G, H):
|
33 |
+
if not G.is_multigraph() and not H.is_multigraph():
|
34 |
+
for u, v, c in G.edges(data=True):
|
35 |
+
for x, y, d in H.edges(data=True):
|
36 |
+
yield (u, x), (v, y), _dict_product(c, d)
|
37 |
+
if not G.is_multigraph() and H.is_multigraph():
|
38 |
+
for u, v, c in G.edges(data=True):
|
39 |
+
for x, y, k, d in H.edges(data=True, keys=True):
|
40 |
+
yield (u, x), (v, y), k, _dict_product(c, d)
|
41 |
+
if G.is_multigraph() and not H.is_multigraph():
|
42 |
+
for u, v, k, c in G.edges(data=True, keys=True):
|
43 |
+
for x, y, d in H.edges(data=True):
|
44 |
+
yield (u, x), (v, y), k, _dict_product(c, d)
|
45 |
+
if G.is_multigraph() and H.is_multigraph():
|
46 |
+
for u, v, j, c in G.edges(data=True, keys=True):
|
47 |
+
for x, y, k, d in H.edges(data=True, keys=True):
|
48 |
+
yield (u, x), (v, y), (j, k), _dict_product(c, d)
|
49 |
+
|
50 |
+
|
51 |
+
def _undirected_edges_cross_edges(G, H):
|
52 |
+
if not G.is_multigraph() and not H.is_multigraph():
|
53 |
+
for u, v, c in G.edges(data=True):
|
54 |
+
for x, y, d in H.edges(data=True):
|
55 |
+
yield (v, x), (u, y), _dict_product(c, d)
|
56 |
+
if not G.is_multigraph() and H.is_multigraph():
|
57 |
+
for u, v, c in G.edges(data=True):
|
58 |
+
for x, y, k, d in H.edges(data=True, keys=True):
|
59 |
+
yield (v, x), (u, y), k, _dict_product(c, d)
|
60 |
+
if G.is_multigraph() and not H.is_multigraph():
|
61 |
+
for u, v, k, c in G.edges(data=True, keys=True):
|
62 |
+
for x, y, d in H.edges(data=True):
|
63 |
+
yield (v, x), (u, y), k, _dict_product(c, d)
|
64 |
+
if G.is_multigraph() and H.is_multigraph():
|
65 |
+
for u, v, j, c in G.edges(data=True, keys=True):
|
66 |
+
for x, y, k, d in H.edges(data=True, keys=True):
|
67 |
+
yield (v, x), (u, y), (j, k), _dict_product(c, d)
|
68 |
+
|
69 |
+
|
70 |
+
def _edges_cross_nodes(G, H):
|
71 |
+
if G.is_multigraph():
|
72 |
+
for u, v, k, d in G.edges(data=True, keys=True):
|
73 |
+
for x in H:
|
74 |
+
yield (u, x), (v, x), k, d
|
75 |
+
else:
|
76 |
+
for u, v, d in G.edges(data=True):
|
77 |
+
for x in H:
|
78 |
+
if H.is_multigraph():
|
79 |
+
yield (u, x), (v, x), None, d
|
80 |
+
else:
|
81 |
+
yield (u, x), (v, x), d
|
82 |
+
|
83 |
+
|
84 |
+
def _nodes_cross_edges(G, H):
|
85 |
+
if H.is_multigraph():
|
86 |
+
for x in G:
|
87 |
+
for u, v, k, d in H.edges(data=True, keys=True):
|
88 |
+
yield (x, u), (x, v), k, d
|
89 |
+
else:
|
90 |
+
for x in G:
|
91 |
+
for u, v, d in H.edges(data=True):
|
92 |
+
if G.is_multigraph():
|
93 |
+
yield (x, u), (x, v), None, d
|
94 |
+
else:
|
95 |
+
yield (x, u), (x, v), d
|
96 |
+
|
97 |
+
|
98 |
+
def _edges_cross_nodes_and_nodes(G, H):
|
99 |
+
if G.is_multigraph():
|
100 |
+
for u, v, k, d in G.edges(data=True, keys=True):
|
101 |
+
for x in H:
|
102 |
+
for y in H:
|
103 |
+
yield (u, x), (v, y), k, d
|
104 |
+
else:
|
105 |
+
for u, v, d in G.edges(data=True):
|
106 |
+
for x in H:
|
107 |
+
for y in H:
|
108 |
+
if H.is_multigraph():
|
109 |
+
yield (u, x), (v, y), None, d
|
110 |
+
else:
|
111 |
+
yield (u, x), (v, y), d
|
112 |
+
|
113 |
+
|
114 |
+
def _init_product_graph(G, H):
|
115 |
+
if G.is_directed() != H.is_directed():
|
116 |
+
msg = "G and H must be both directed or both undirected"
|
117 |
+
raise nx.NetworkXError(msg)
|
118 |
+
if G.is_multigraph() or H.is_multigraph():
|
119 |
+
GH = nx.MultiGraph()
|
120 |
+
else:
|
121 |
+
GH = nx.Graph()
|
122 |
+
if G.is_directed():
|
123 |
+
GH = GH.to_directed()
|
124 |
+
return GH
|
125 |
+
|
126 |
+
|
127 |
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
|
128 |
+
def tensor_product(G, H):
|
129 |
+
r"""Returns the tensor product of G and H.
|
130 |
+
|
131 |
+
The tensor product $P$ of the graphs $G$ and $H$ has a node set that
|
132 |
+
is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
|
133 |
+
$P$ has an edge $((u,v), (x,y))$ if and only if $(u,x)$ is an edge in $G$
|
134 |
+
and $(v,y)$ is an edge in $H$.
|
135 |
+
|
136 |
+
Tensor product is sometimes also referred to as the categorical product,
|
137 |
+
direct product, cardinal product or conjunction.
|
138 |
+
|
139 |
+
|
140 |
+
Parameters
|
141 |
+
----------
|
142 |
+
G, H: graphs
|
143 |
+
Networkx graphs.
|
144 |
+
|
145 |
+
Returns
|
146 |
+
-------
|
147 |
+
P: NetworkX graph
|
148 |
+
The tensor product of G and H. P will be a multi-graph if either G
|
149 |
+
or H is a multi-graph, will be directed if G and H are directed,
|
150 |
+
and undirected if G and H are undirected.
|
151 |
+
|
152 |
+
Raises
|
153 |
+
------
|
154 |
+
NetworkXError
|
155 |
+
If G and H are not both directed or both undirected.
|
156 |
+
|
157 |
+
Notes
|
158 |
+
-----
|
159 |
+
Node attributes in P are two-tuple of the G and H node attributes.
|
160 |
+
Missing attributes are assigned None.
|
161 |
+
|
162 |
+
Examples
|
163 |
+
--------
|
164 |
+
>>> G = nx.Graph()
|
165 |
+
>>> H = nx.Graph()
|
166 |
+
>>> G.add_node(0, a1=True)
|
167 |
+
>>> H.add_node("a", a2="Spam")
|
168 |
+
>>> P = nx.tensor_product(G, H)
|
169 |
+
>>> list(P)
|
170 |
+
[(0, 'a')]
|
171 |
+
|
172 |
+
Edge attributes and edge keys (for multigraphs) are also copied to the
|
173 |
+
new product graph
|
174 |
+
"""
|
175 |
+
GH = _init_product_graph(G, H)
|
176 |
+
GH.add_nodes_from(_node_product(G, H))
|
177 |
+
GH.add_edges_from(_directed_edges_cross_edges(G, H))
|
178 |
+
if not GH.is_directed():
|
179 |
+
GH.add_edges_from(_undirected_edges_cross_edges(G, H))
|
180 |
+
return GH
|
181 |
+
|
182 |
+
|
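For simple undirected graphs without self-loops the construction above yields 2 * |E(G)| * |E(H)| edges, since each pair of input edges contributes two distinct product edges. A sketch (not part of the file, assumed example graphs):

import networkx as nx

G = nx.path_graph(3)    # 2 edges
H = nx.cycle_graph(4)   # 4 edges

P = nx.tensor_product(G, H)
assert P.number_of_nodes() == G.number_of_nodes() * H.number_of_nodes()
assert P.number_of_edges() == 2 * G.number_of_edges() * H.number_of_edges()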
183 |
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
|
184 |
+
def cartesian_product(G, H):
|
185 |
+
r"""Returns the Cartesian product of G and H.
|
186 |
+
|
187 |
+
The Cartesian product $P$ of the graphs $G$ and $H$ has a node set that
|
188 |
+
is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
|
189 |
+
$P$ has an edge $((u,v),(x,y))$ if and only if either $u$ is equal to $x$
|
190 |
+
and both $v$ and $y$ are adjacent in $H$ or if $v$ is equal to $y$ and
|
191 |
+
both $u$ and $x$ are adjacent in $G$.
|
192 |
+
|
193 |
+
Parameters
|
194 |
+
----------
|
195 |
+
G, H: graphs
|
196 |
+
Networkx graphs.
|
197 |
+
|
198 |
+
Returns
|
199 |
+
-------
|
200 |
+
P: NetworkX graph
|
201 |
+
The Cartesian product of G and H. P will be a multi-graph if either G
|
202 |
+
or H is a multi-graph. Will be directed if G and H are directed,
|
203 |
+
and undirected if G and H are undirected.
|
204 |
+
|
205 |
+
Raises
|
206 |
+
------
|
207 |
+
NetworkXError
|
208 |
+
If G and H are not both directed or both undirected.
|
209 |
+
|
210 |
+
Notes
|
211 |
+
-----
|
212 |
+
Node attributes in P are two-tuple of the G and H node attributes.
|
213 |
+
Missing attributes are assigned None.
|
214 |
+
|
215 |
+
Examples
|
216 |
+
--------
|
217 |
+
>>> G = nx.Graph()
|
218 |
+
>>> H = nx.Graph()
|
219 |
+
>>> G.add_node(0, a1=True)
|
220 |
+
>>> H.add_node("a", a2="Spam")
|
221 |
+
>>> P = nx.cartesian_product(G, H)
|
222 |
+
>>> list(P)
|
223 |
+
[(0, 'a')]
|
224 |
+
|
225 |
+
Edge attributes and edge keys (for multigraphs) are also copied to the
|
226 |
+
new product graph
|
227 |
+
"""
|
228 |
+
GH = _init_product_graph(G, H)
|
229 |
+
GH.add_nodes_from(_node_product(G, H))
|
230 |
+
GH.add_edges_from(_edges_cross_nodes(G, H))
|
231 |
+
GH.add_edges_from(_nodes_cross_edges(G, H))
|
232 |
+
return GH
|
233 |
+
|
234 |
+
|
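A sketch (not part of the file) relating the construction above to grid graphs: the Cartesian product of two paths is the corresponding 2-d grid, and the node and edge counts follow |V| = n1*n2 and |E| = n1*m2 + n2*m1. Graphs are chosen only for illustration:

import networkx as nx

P = nx.cartesian_product(nx.path_graph(3), nx.path_graph(4))

assert nx.is_isomorphic(P, nx.grid_2d_graph(3, 4))
assert P.number_of_nodes() == 3 * 4
assert P.number_of_edges() == 3 * 3 + 4 * 2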
235 |
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
|
236 |
+
def lexicographic_product(G, H):
|
237 |
+
r"""Returns the lexicographic product of G and H.
|
238 |
+
|
239 |
+
The lexicographical product $P$ of the graphs $G$ and $H$ has a node set
|
240 |
+
that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
|
241 |
+
$P$ has an edge $((u,v), (x,y))$ if and only if $(u,x)$ is an edge in $G$
|
242 |
+
or $u==x$ and $(v,y)$ is an edge in $H$.
|
243 |
+
|
244 |
+
Parameters
|
245 |
+
----------
|
246 |
+
G, H: graphs
|
247 |
+
Networkx graphs.
|
248 |
+
|
249 |
+
Returns
|
250 |
+
-------
|
251 |
+
P: NetworkX graph
|
252 |
+
The lexicographic product of G and H. P will be a multi-graph if either G
|
253 |
+
or H is a multi-graph. Will be directed if G and H are directed,
|
254 |
+
and undirected if G and H are undirected.
|
255 |
+
|
256 |
+
Raises
|
257 |
+
------
|
258 |
+
NetworkXError
|
259 |
+
If G and H are not both directed or both undirected.
|
260 |
+
|
261 |
+
Notes
|
262 |
+
-----
|
263 |
+
Node attributes in P are two-tuple of the G and H node attributes.
|
264 |
+
Missing attributes are assigned None.
|
265 |
+
|
266 |
+
Examples
|
267 |
+
--------
|
268 |
+
>>> G = nx.Graph()
|
269 |
+
>>> H = nx.Graph()
|
270 |
+
>>> G.add_node(0, a1=True)
|
271 |
+
>>> H.add_node("a", a2="Spam")
|
272 |
+
>>> P = nx.lexicographic_product(G, H)
|
273 |
+
>>> list(P)
|
274 |
+
[(0, 'a')]
|
275 |
+
|
276 |
+
Edge attributes and edge keys (for multigraphs) are also copied to the
|
277 |
+
new product graph
|
278 |
+
"""
|
279 |
+
GH = _init_product_graph(G, H)
|
280 |
+
GH.add_nodes_from(_node_product(G, H))
|
281 |
+
# Edges in G regardless of H designation
|
282 |
+
GH.add_edges_from(_edges_cross_nodes_and_nodes(G, H))
|
283 |
+
# For each x in G, only if there is an edge in H
|
284 |
+
GH.add_edges_from(_nodes_cross_edges(G, H))
|
285 |
+
return GH
|
286 |
+
|
287 |
+
|
288 |
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
|
289 |
+
def strong_product(G, H):
|
290 |
+
r"""Returns the strong product of G and H.
|
291 |
+
|
292 |
+
The strong product $P$ of the graphs $G$ and $H$ has a node set that
|
293 |
+
is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
|
294 |
+
$P$ has an edge $((u,v), (x,y))$ if and only if
|
295 |
+
$u==x$ and $(v,y)$ is an edge in $H$, or
|
296 |
+
$v==y$ and $(u,x)$ is an edge in $G$, or
|
297 |
+
$(u,x)$ is an edge in $G$ and $(v,y)$ is an edge in $H$.
|
298 |
+
|
299 |
+
Parameters
|
300 |
+
----------
|
301 |
+
G, H: graphs
|
302 |
+
Networkx graphs.
|
303 |
+
|
304 |
+
Returns
|
305 |
+
-------
|
306 |
+
P: NetworkX graph
|
307 |
+
The strong product of G and H. P will be a multi-graph if either G
|
308 |
+
or H is a multi-graph. Will be directed if G and H are directed,
|
309 |
+
and undirected if G and H are undirected.
|
310 |
+
|
311 |
+
Raises
|
312 |
+
------
|
313 |
+
NetworkXError
|
314 |
+
If G and H are not both directed or both undirected.
|
315 |
+
|
316 |
+
Notes
|
317 |
+
-----
|
318 |
+
Node attributes in P are two-tuple of the G and H node attributes.
|
319 |
+
Missing attributes are assigned None.
|
320 |
+
|
321 |
+
Examples
|
322 |
+
--------
|
323 |
+
>>> G = nx.Graph()
|
324 |
+
>>> H = nx.Graph()
|
325 |
+
>>> G.add_node(0, a1=True)
|
326 |
+
>>> H.add_node("a", a2="Spam")
|
327 |
+
>>> P = nx.strong_product(G, H)
|
328 |
+
>>> list(P)
|
329 |
+
[(0, 'a')]
|
330 |
+
|
331 |
+
Edge attributes and edge keys (for multigraphs) are also copied to the
|
332 |
+
new product graph
|
333 |
+
"""
|
334 |
+
GH = _init_product_graph(G, H)
|
335 |
+
GH.add_nodes_from(_node_product(G, H))
|
336 |
+
GH.add_edges_from(_nodes_cross_edges(G, H))
|
337 |
+
GH.add_edges_from(_edges_cross_nodes(G, H))
|
338 |
+
GH.add_edges_from(_directed_edges_cross_edges(G, H))
|
339 |
+
if not GH.is_directed():
|
340 |
+
GH.add_edges_from(_undirected_edges_cross_edges(G, H))
|
341 |
+
return GH
|
342 |
+
|
343 |
+
|
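For simple graphs the edge set assembled above is the disjoint union of the Cartesian-product edges (one coordinate fixed) and the tensor-product edges (both coordinates move), so the edge counts add. A sketch (not part of the file, assumed example graphs):

import networkx as nx

G = nx.path_graph(3)
H = nx.cycle_graph(4)

strong = nx.strong_product(G, H)
cart = nx.cartesian_product(G, H)
tens = nx.tensor_product(G, H)

assert strong.number_of_edges() == cart.number_of_edges() + tens.number_of_edges()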
344 |
+
@not_implemented_for("directed")
|
345 |
+
@not_implemented_for("multigraph")
|
346 |
+
@nx._dispatchable(returns_graph=True)
|
347 |
+
def power(G, k):
|
348 |
+
"""Returns the specified power of a graph.
|
349 |
+
|
350 |
+
The $k$th power of a simple graph $G$, denoted $G^k$, is a
|
351 |
+
graph on the same set of nodes in which two distinct nodes $u$ and
|
352 |
+
$v$ are adjacent in $G^k$ if and only if the shortest path
|
353 |
+
distance between $u$ and $v$ in $G$ is at most $k$.
|
354 |
+
|
355 |
+
Parameters
|
356 |
+
----------
|
357 |
+
G : graph
|
358 |
+
A NetworkX simple graph object.
|
359 |
+
|
360 |
+
k : positive integer
|
361 |
+
The power to which to raise the graph `G`.
|
362 |
+
|
363 |
+
Returns
|
364 |
+
-------
|
365 |
+
NetworkX simple graph
|
366 |
+
`G` to the power `k`.
|
367 |
+
|
368 |
+
Raises
|
369 |
+
------
|
370 |
+
ValueError
|
371 |
+
If the exponent `k` is not positive.
|
372 |
+
|
373 |
+
NetworkXNotImplemented
|
374 |
+
If `G` is not a simple graph.
|
375 |
+
|
376 |
+
Examples
|
377 |
+
--------
|
378 |
+
The number of edges will never decrease when taking successive
|
379 |
+
powers:
|
380 |
+
|
381 |
+
>>> G = nx.path_graph(4)
|
382 |
+
>>> list(nx.power(G, 2).edges)
|
383 |
+
[(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]
|
384 |
+
>>> list(nx.power(G, 3).edges)
|
385 |
+
[(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
|
386 |
+
|
387 |
+
The `k` th power of a cycle graph on *n* nodes is the complete graph
|
388 |
+
on *n* nodes, if `k` is at least ``n // 2``:
|
389 |
+
|
390 |
+
>>> G = nx.cycle_graph(5)
|
391 |
+
>>> H = nx.complete_graph(5)
|
392 |
+
>>> nx.is_isomorphic(nx.power(G, 2), H)
|
393 |
+
True
|
394 |
+
>>> G = nx.cycle_graph(8)
|
395 |
+
>>> H = nx.complete_graph(8)
|
396 |
+
>>> nx.is_isomorphic(nx.power(G, 4), H)
|
397 |
+
True
|
398 |
+
|
399 |
+
References
|
400 |
+
----------
|
401 |
+
.. [1] J. A. Bondy, U. S. R. Murty, *Graph Theory*. Springer, 2008.
|
402 |
+
|
403 |
+
Notes
|
404 |
+
-----
|
405 |
+
This definition of "power graph" comes from Exercise 3.1.6 of
|
406 |
+
*Graph Theory* by Bondy and Murty [1]_.
|
407 |
+
|
408 |
+
"""
|
409 |
+
if k <= 0:
|
410 |
+
raise ValueError("k must be a positive integer")
|
411 |
+
H = nx.Graph()
|
412 |
+
H.add_nodes_from(G)
|
413 |
+
# update BFS code to ignore self loops.
|
414 |
+
for n in G:
|
415 |
+
seen = {} # level (number of hops) when seen in BFS
|
416 |
+
level = 1 # the current level
|
417 |
+
nextlevel = G[n]
|
418 |
+
while nextlevel:
|
419 |
+
thislevel = nextlevel # advance to next level
|
420 |
+
nextlevel = {} # and start a new list (fringe)
|
421 |
+
for v in thislevel:
|
422 |
+
if v == n: # avoid self loop
|
423 |
+
continue
|
424 |
+
if v not in seen:
|
425 |
+
seen[v] = level # set the level of vertex v
|
426 |
+
nextlevel.update(G[v]) # add neighbors of v
|
427 |
+
if k <= level:
|
428 |
+
break
|
429 |
+
level += 1
|
430 |
+
H.add_edges_from((n, nbr) for nbr in seen)
|
431 |
+
return H
|
432 |
+
|
433 |
+
|
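The truncated BFS above adds an edge (n, v) exactly when v lies within k hops of n. A cross-check against shortest-path lengths (not part of the file, example graph is an assumption):

import networkx as nx

G = nx.cycle_graph(7)
k = 2
Gk = nx.power(G, k)

for u in G:
    lengths = nx.single_source_shortest_path_length(G, u, cutoff=k)
    expected = {v for v, d in lengths.items() if 0 < d <= k}
    assert set(Gk[u]) == expected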
434 |
+
@not_implemented_for("multigraph")
|
435 |
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
|
436 |
+
def rooted_product(G, H, root):
|
437 |
+
"""Return the rooted product of graphs G and H rooted at root in H.
|
438 |
+
|
439 |
+
A new graph is constructed representing the rooted product of
|
440 |
+
the inputted graphs, G and H, with a root in H.
|
441 |
+
A rooted product duplicates H for each node in G, with the root
|
442 |
+
of H corresponding to the node in G. Nodes are renamed as the direct
|
443 |
+
product of G and H. The result is a subgraph of the cartesian product.
|
444 |
+
|
445 |
+
Parameters
|
446 |
+
----------
|
447 |
+
G,H : graph
|
448 |
+
A NetworkX graph
|
449 |
+
root : node
|
450 |
+
A node in H
|
451 |
+
|
452 |
+
Returns
|
453 |
+
-------
|
454 |
+
R : The rooted product of G and H with a specified root in H
|
455 |
+
|
456 |
+
Notes
|
457 |
+
-----
|
458 |
+
The nodes of R are the Cartesian Product of the nodes of G and H.
|
459 |
+
The nodes of G and H are not relabeled.
|
460 |
+
"""
|
461 |
+
if root not in H:
|
462 |
+
raise nx.NetworkXError("root must be a vertex in H")
|
463 |
+
|
464 |
+
R = nx.Graph()
|
465 |
+
R.add_nodes_from(product(G, H))
|
466 |
+
|
467 |
+
R.add_edges_from(((e[0], root), (e[1], root)) for e in G.edges())
|
468 |
+
R.add_edges_from(((g, e[0]), (g, e[1])) for g in G for e in H.edges())
|
469 |
+
|
470 |
+
return R
|
471 |
+
|
472 |
+
|
473 |
+
@not_implemented_for("directed")
|
474 |
+
@not_implemented_for("multigraph")
|
475 |
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
|
476 |
+
def corona_product(G, H):
|
477 |
+
r"""Returns the Corona product of G and H.
|
478 |
+
|
479 |
+
The corona product of $G$ and $H$ is the graph $C = G \circ H$ obtained by
|
480 |
+
taking one copy of $G$, called the center graph, $|V(G)|$ copies of $H$,
|
481 |
+
called the outer graph, and making the $i$-th vertex of $G$ adjacent to
|
482 |
+
every vertex of the $i$-th copy of $H$, where $1 \leq i \leq |V(G)|$.
|
483 |
+
|
484 |
+
Parameters
|
485 |
+
----------
|
486 |
+
G, H: NetworkX graphs
|
487 |
+
The graphs to take the corona product of.
|
488 |
+
`G` is the center graph and `H` is the outer graph
|
489 |
+
|
490 |
+
Returns
|
491 |
+
-------
|
492 |
+
C: NetworkX graph
|
493 |
+
The Corona product of G and H.
|
494 |
+
|
495 |
+
Raises
|
496 |
+
------
|
497 |
+
NetworkXError
|
498 |
+
If G and H are not both directed or both undirected.
|
499 |
+
|
500 |
+
Examples
|
501 |
+
--------
|
502 |
+
>>> G = nx.cycle_graph(4)
|
503 |
+
>>> H = nx.path_graph(2)
|
504 |
+
>>> C = nx.corona_product(G, H)
|
505 |
+
>>> list(C)
|
506 |
+
[0, 1, 2, 3, (0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
|
507 |
+
>>> print(C)
|
508 |
+
Graph with 12 nodes and 16 edges
|
509 |
+
|
510 |
+
References
|
511 |
+
----------
|
512 |
+
.. [1] M. Tavakoli, F. Rahbarnia, and A. R. Ashrafi,
|
513 |
+
"Studying the corona product of graphs under some graph invariants,"
|
514 |
+
Transactions on Combinatorics, vol. 3, no. 3, pp. 43–49, Sep. 2014,
|
515 |
+
doi: 10.22108/toc.2014.5542.
|
516 |
+
.. [2] A. Faraji, "Corona Product in Graph Theory," Ali Faraji, May 11, 2021.
|
517 |
+
https://blog.alifaraji.ir/math/graph-theory/corona-product.html (accessed Dec. 07, 2021).
|
518 |
+
"""
|
519 |
+
GH = _init_product_graph(G, H)
|
520 |
+
GH.add_nodes_from(G)
|
521 |
+
GH.add_edges_from(G.edges)
|
522 |
+
|
523 |
+
for G_node in G:
|
524 |
+
# copy nodes of H in GH, call it H_i
|
525 |
+
GH.add_nodes_from((G_node, v) for v in H)
|
526 |
+
|
527 |
+
# copy edges of H_i based on H
|
528 |
+
GH.add_edges_from(
|
529 |
+
((G_node, e0), (G_node, e1), d) for e0, e1, d in H.edges.data()
|
530 |
+
)
|
531 |
+
|
532 |
+
# creating new edges between H_i and a G's node
|
533 |
+
GH.add_edges_from((G_node, (G_node, H_node)) for H_node in H)
|
534 |
+
|
535 |
+
return GH
|
536 |
+
|
537 |
+
|
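The construction above implies |V(C)| = |V(G)| * (1 + |V(H)|) and |E(C)| = |E(G)| + |V(G)| * (|E(H)| + |V(H)|); the docstring's 12-node, 16-edge example fits these counts. A short sketch (not part of the file):

import networkx as nx

G = nx.cycle_graph(4)   # center graph: 4 nodes, 4 edges
H = nx.path_graph(2)    # outer graph:  2 nodes, 1 edge

C = nx.corona_product(G, H)
assert C.number_of_nodes() == 4 * (1 + 2)       # 12
assert C.number_of_edges() == 4 + 4 * (1 + 2)   # 16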
538 |
+
@nx._dispatchable(
|
539 |
+
graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True, returns_graph=True
|
540 |
+
)
|
541 |
+
def modular_product(G, H):
|
542 |
+
r"""Returns the Modular product of G and H.
|
543 |
+
|
544 |
+
The modular product of `G` and `H` is the graph $M = G \nabla H$,
|
545 |
+
consisting of the node set $V(M) = V(G) \times V(H)$ that is the Cartesian
|
546 |
+
product of the node sets of `G` and `H`. Further, M contains an edge ((u, v), (x, y)):
|
547 |
+
|
548 |
+
- if u is adjacent to x in `G` and v is adjacent to y in `H`, or
|
549 |
+
- if u is not adjacent to x in `G` and v is not adjacent to y in `H`.
|
550 |
+
|
551 |
+
More formally::
|
552 |
+
|
553 |
+
E(M) = {((u, v), (x, y)) | ((u, x) in E(G) and (v, y) in E(H)) or
|
554 |
+
((u, x) not in E(G) and (v, y) not in E(H))}
|
555 |
+
|
556 |
+
Parameters
|
557 |
+
----------
|
558 |
+
G, H: NetworkX graphs
|
559 |
+
The graphs to take the modular product of.
|
560 |
+
|
561 |
+
Returns
|
562 |
+
-------
|
563 |
+
M: NetworkX graph
|
564 |
+
The Modular product of `G` and `H`.
|
565 |
+
|
566 |
+
Raises
|
567 |
+
------
|
568 |
+
NetworkXNotImplemented
|
569 |
+
If `G` is not a simple graph.
|
570 |
+
|
571 |
+
Examples
|
572 |
+
--------
|
573 |
+
>>> G = nx.cycle_graph(4)
|
574 |
+
>>> H = nx.path_graph(2)
|
575 |
+
>>> M = nx.modular_product(G, H)
|
576 |
+
>>> list(M)
|
577 |
+
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
|
578 |
+
>>> print(M)
|
579 |
+
Graph with 8 nodes and 8 edges
|
580 |
+
|
581 |
+
Notes
|
582 |
+
-----
|
583 |
+
The *modular product* is defined in [1]_ and was first
|
584 |
+
introduced as the *weak modular product*.
|
585 |
+
|
586 |
+
The modular product reduces the problem of counting isomorphic subgraphs
|
587 |
+
in `G` and `H` to the problem of counting cliques in M. The subgraphs of
|
588 |
+
`G` and `H` that are induced by the nodes of a clique in M are
|
589 |
+
isomorphic [2]_ [3]_.
|
590 |
+
|
591 |
+
References
|
592 |
+
----------
|
593 |
+
.. [1] R. Hammack, W. Imrich, and S. Klavžar,
|
594 |
+
"Handbook of Product Graphs", CRC Press, 2011.
|
595 |
+
|
596 |
+
.. [2] H. G. Barrow and R. M. Burstall,
|
597 |
+
"Subgraph isomorphism, matching relational structures and maximal
|
598 |
+
cliques", Information Processing Letters, vol. 4, issue 4, pp. 83-84,
|
599 |
+
1976, https://doi.org/10.1016/0020-0190(76)90049-1.
|
600 |
+
|
601 |
+
.. [3] V. G. Vizing, "Reduction of the problem of isomorphism and isomorphic
|
602 |
+
entrance to the task of finding the nondensity of a graph." Proc. Third
|
603 |
+
All-Union Conference on Problems of Theoretical Cybernetics. 1974.
|
604 |
+
"""
|
605 |
+
if G.is_directed() or H.is_directed():
|
606 |
+
raise nx.NetworkXNotImplemented(
|
607 |
+
"Modular product not implemented for directed graphs"
|
608 |
+
)
|
609 |
+
if G.is_multigraph() or H.is_multigraph():
|
610 |
+
raise nx.NetworkXNotImplemented(
|
611 |
+
"Modular product not implemented for multigraphs"
|
612 |
+
)
|
613 |
+
|
614 |
+
GH = _init_product_graph(G, H)
|
615 |
+
GH.add_nodes_from(_node_product(G, H))
|
616 |
+
|
617 |
+
for u, v, c in G.edges(data=True):
|
618 |
+
for x, y, d in H.edges(data=True):
|
619 |
+
GH.add_edge((u, x), (v, y), **_dict_product(c, d))
|
620 |
+
GH.add_edge((v, x), (u, y), **_dict_product(c, d))
|
621 |
+
|
622 |
+
G = nx.complement(G)
|
623 |
+
H = nx.complement(H)
|
624 |
+
|
625 |
+
for u, v, c in G.edges(data=True):
|
626 |
+
for x, y, d in H.edges(data=True):
|
627 |
+
GH.add_edge((u, x), (v, y), **_dict_product(c, d))
|
628 |
+
GH.add_edge((v, x), (u, y), **_dict_product(c, d))
|
629 |
+
|
630 |
+
return GH
|
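Because cliques in M correspond to pairs of isomorphic induced subgraphs, the identity isomorphism of a graph with itself shows up as a clique on the "diagonal" nodes (u, u) of the modular product of G with G. A small sketch (not part of the file, assumed example graph):

import networkx as nx

G = nx.path_graph(4)
M = nx.modular_product(G, G)

# For u != v, (u, u) and (v, v) are either both adjacent or both
# non-adjacent in the two factors, so the diagonal induces a clique.
diag = [(u, u) for u in G]
sub = M.subgraph(diag)
assert sub.number_of_edges() == len(diag) * (len(diag) - 1) // 2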
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/__init__.py
ADDED
File without changes
|
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-310.pyc
ADDED
Binary file (11.8 kB).
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_all.py
ADDED
@@ -0,0 +1,328 @@
import pytest

import networkx as nx
from networkx.utils import edges_equal


def test_union_all_attributes():
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    j = g.copy()
    j.graph["name"] = "j"
    j.graph["attr"] = "attr"
    j.nodes[0]["x"] = 7

    ghj = nx.union_all([g, h, j], rename=("g", "h", "j"))
    assert set(ghj.nodes()) == {"h0", "h1", "g0", "g1", "j0", "j1"}
    for n in ghj:
        graph, node = n
        assert ghj.nodes[n] == eval(graph).nodes[int(node)]

    assert ghj.graph["attr"] == "attr"
    assert ghj.graph["name"] == "j"  # j graph attributes take precedent


def test_intersection_all():
    G = nx.Graph()
    H = nx.Graph()
    R = nx.Graph(awesome=True)
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    R.add_nodes_from([1, 2, 3, 4])
    R.add_edge(2, 3)
    R.add_edge(4, 1)
    I = nx.intersection_all([G, H, R])
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]
    assert I.graph == {}


def test_intersection_all_different_node_sets():
    G = nx.Graph()
    H = nx.Graph()
    R = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 6, 7])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    G.add_edge(6, 7)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    R.add_nodes_from([1, 2, 3, 4, 8, 9])
    R.add_edge(2, 3)
    R.add_edge(4, 1)
    R.add_edge(8, 9)
    I = nx.intersection_all([G, H, R])
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]


def test_intersection_all_attributes():
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())


def test_intersection_all_attributes_different_node_sets():
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    g.add_node(2)
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())


def test_intersection_all_multigraph_attributes():
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]


def test_intersection_all_multigraph_attributes_different_node_sets():
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    g.add_edge(1, 2, key=1)
    g.add_edge(1, 2, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=2)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1), (0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0), (0, 1, 2)]


def test_intersection_all_digraph():
    g = nx.DiGraph()
    g.add_edges_from([(1, 2), (2, 3)])
    h = nx.DiGraph()
    h.add_edges_from([(2, 1), (2, 3)])
    gh = nx.intersection_all([g, h])
    assert sorted(gh.edges()) == [(2, 3)]


def test_union_all_and_compose_all():
    K3 = nx.complete_graph(3)
    P3 = nx.path_graph(3)

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G1.add_edge("A", "C")
    G1.add_edge("A", "D")
    G2 = nx.DiGraph()
    G2.add_edge("1", "2")
    G2.add_edge("1", "3")
    G2.add_edge("1", "4")

    G = nx.union_all([G1, G2])
    H = nx.compose_all([G1, G2])
    assert edges_equal(G.edges(), H.edges())
    assert not G.has_edge("A", "1")
    pytest.raises(nx.NetworkXError, nx.union, K3, P3)
    H1 = nx.union_all([H, G1], rename=("H", "G1"))
    assert sorted(H1.nodes()) == [
        "G1A",
        "G1B",
        "G1C",
        "G1D",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    H2 = nx.union_all([H, G2], rename=("H", ""))
    assert sorted(H2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    assert not H1.has_edge("NB", "NA")

    G = nx.compose_all([G, G])
    assert edges_equal(G.edges(), H.edges())

    G2 = nx.union_all([G2, G2], rename=("", "copy"))
    assert sorted(G2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "copy1",
        "copy2",
        "copy3",
        "copy4",
    ]

    assert sorted(G2.neighbors("copy4")) == []
    assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"]
    assert len(G) == 8
    assert nx.number_of_edges(G) == 6

    E = nx.disjoint_union_all([G, G])
    assert len(E) == 16
    assert nx.number_of_edges(E) == 12

    E = nx.disjoint_union_all([G1, G2])
    assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G2 = nx.DiGraph()
    G2.add_edge(1, 2)
    G3 = nx.DiGraph()
    G3.add_edge(11, 22)
    G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3"))
    assert sorted(G4.nodes()) == ["G1A", "G1B", "G21", "G22", "G311", "G322"]


def test_union_all_multigraph():
    G = nx.MultiGraph()
    G.add_edge(1, 2, key=0)
    G.add_edge(1, 2, key=1)
    H = nx.MultiGraph()
    H.add_edge(3, 4, key=0)
    H.add_edge(3, 4, key=1)
    GH = nx.union_all([G, H])
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))


def test_input_output():
    l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)], awesome=True)]
    U = nx.disjoint_union_all(l)
    assert len(l) == 2
    assert U.graph["awesome"]
    C = nx.compose_all(l)
    assert len(l) == 2
    l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])]
    R = nx.intersection_all(l)
    assert len(l) == 2


def test_mixed_type_union():
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.union_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.union_all([X, Y])


def test_mixed_type_disjoint_union():
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.disjoint_union_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.disjoint_union_all([X, Y])


def test_mixed_type_intersection():
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.intersection_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.intersection_all([X, Y])


def test_mixed_type_compose():
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.compose_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.compose_all([X, Y])


def test_empty_union():
    with pytest.raises(ValueError):
        nx.union_all([])


def test_empty_disjoint_union():
    with pytest.raises(ValueError):
        nx.disjoint_union_all([])


def test_empty_compose_all():
    with pytest.raises(ValueError):
        nx.compose_all([])


def test_empty_intersection_all():
    with pytest.raises(ValueError):
        nx.intersection_all([])
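An illustrative aside on the renaming behaviour exercised by the tests above, assuming nothing beyond the public union_all / disjoint_union_all API; the prefixes "a" and "b" are arbitrary.

import networkx as nx

A = nx.path_graph(2)  # nodes 0, 1
B = nx.path_graph(2)  # nodes 0, 1

# union_all requires disjoint node sets; rename prefixes each graph's nodes.
U = nx.union_all([A, B], rename=("a", "b"))
print(sorted(U.nodes()))  # ['a0', 'a1', 'b0', 'b1']

# disjoint_union_all instead relabels nodes with consecutive integers.
D = nx.disjoint_union_all([A, B])
print(sorted(D.nodes()))  # [0, 1, 2, 3]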
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_binary.py
ADDED
@@ -0,0 +1,471 @@
1 |
+
import os
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
from networkx.classes.tests import dispatch_interface
|
7 |
+
from networkx.utils import edges_equal
|
8 |
+
|
9 |
+
|
10 |
+
def test_union_attributes():
|
11 |
+
g = nx.Graph()
|
12 |
+
g.add_node(0, x=4)
|
13 |
+
g.add_node(1, x=5)
|
14 |
+
g.add_edge(0, 1, size=5)
|
15 |
+
g.graph["name"] = "g"
|
16 |
+
|
17 |
+
h = g.copy()
|
18 |
+
h.graph["name"] = "h"
|
19 |
+
h.graph["attr"] = "attr"
|
20 |
+
h.nodes[0]["x"] = 7
|
21 |
+
|
22 |
+
gh = nx.union(g, h, rename=("g", "h"))
|
23 |
+
assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"}
|
24 |
+
for n in gh:
|
25 |
+
graph, node = n
|
26 |
+
assert gh.nodes[n] == eval(graph).nodes[int(node)]
|
27 |
+
|
28 |
+
assert gh.graph["attr"] == "attr"
|
29 |
+
assert gh.graph["name"] == "h" # h graph attributes take precedent
|
30 |
+
|
31 |
+
|
32 |
+
def test_intersection():
|
33 |
+
G = nx.Graph()
|
34 |
+
H = nx.Graph()
|
35 |
+
G.add_nodes_from([1, 2, 3, 4])
|
36 |
+
G.add_edge(1, 2)
|
37 |
+
G.add_edge(2, 3)
|
38 |
+
H.add_nodes_from([1, 2, 3, 4])
|
39 |
+
H.add_edge(2, 3)
|
40 |
+
H.add_edge(3, 4)
|
41 |
+
I = nx.intersection(G, H)
|
42 |
+
assert set(I.nodes()) == {1, 2, 3, 4}
|
43 |
+
assert sorted(I.edges()) == [(2, 3)]
|
44 |
+
|
45 |
+
##################
|
46 |
+
# Tests for @nx._dispatchable mechanism with multiple graph arguments
|
47 |
+
# nx.intersection is called as if it were a re-implementation
|
48 |
+
# from another package.
|
49 |
+
###################
|
50 |
+
G2 = dispatch_interface.convert(G)
|
51 |
+
H2 = dispatch_interface.convert(H)
|
52 |
+
I2 = nx.intersection(G2, H2)
|
53 |
+
assert set(I2.nodes()) == {1, 2, 3, 4}
|
54 |
+
assert sorted(I2.edges()) == [(2, 3)]
|
55 |
+
# Only test if not performing auto convert testing of backend implementations
|
56 |
+
if not nx.config["backend_priority"]:
|
57 |
+
with pytest.raises(TypeError):
|
58 |
+
nx.intersection(G2, H)
|
59 |
+
with pytest.raises(TypeError):
|
60 |
+
nx.intersection(G, H2)
|
61 |
+
|
62 |
+
|
63 |
+
def test_intersection_node_sets_different():
|
64 |
+
G = nx.Graph()
|
65 |
+
H = nx.Graph()
|
66 |
+
G.add_nodes_from([1, 2, 3, 4, 7])
|
67 |
+
G.add_edge(1, 2)
|
68 |
+
G.add_edge(2, 3)
|
69 |
+
H.add_nodes_from([1, 2, 3, 4, 5, 6])
|
70 |
+
H.add_edge(2, 3)
|
71 |
+
H.add_edge(3, 4)
|
72 |
+
H.add_edge(5, 6)
|
73 |
+
I = nx.intersection(G, H)
|
74 |
+
assert set(I.nodes()) == {1, 2, 3, 4}
|
75 |
+
assert sorted(I.edges()) == [(2, 3)]
|
76 |
+
|
77 |
+
|
78 |
+
def test_intersection_attributes():
|
79 |
+
g = nx.Graph()
|
80 |
+
g.add_node(0, x=4)
|
81 |
+
g.add_node(1, x=5)
|
82 |
+
g.add_edge(0, 1, size=5)
|
83 |
+
g.graph["name"] = "g"
|
84 |
+
|
85 |
+
h = g.copy()
|
86 |
+
h.graph["name"] = "h"
|
87 |
+
h.graph["attr"] = "attr"
|
88 |
+
h.nodes[0]["x"] = 7
|
89 |
+
gh = nx.intersection(g, h)
|
90 |
+
|
91 |
+
assert set(gh.nodes()) == set(g.nodes())
|
92 |
+
assert set(gh.nodes()) == set(h.nodes())
|
93 |
+
assert sorted(gh.edges()) == sorted(g.edges())
|
94 |
+
|
95 |
+
|
96 |
+
def test_intersection_attributes_node_sets_different():
|
97 |
+
g = nx.Graph()
|
98 |
+
g.add_node(0, x=4)
|
99 |
+
g.add_node(1, x=5)
|
100 |
+
g.add_node(2, x=3)
|
101 |
+
g.add_edge(0, 1, size=5)
|
102 |
+
g.graph["name"] = "g"
|
103 |
+
|
104 |
+
h = g.copy()
|
105 |
+
h.graph["name"] = "h"
|
106 |
+
h.graph["attr"] = "attr"
|
107 |
+
h.nodes[0]["x"] = 7
|
108 |
+
h.remove_node(2)
|
109 |
+
|
110 |
+
gh = nx.intersection(g, h)
|
111 |
+
assert set(gh.nodes()) == set(h.nodes())
|
112 |
+
assert sorted(gh.edges()) == sorted(g.edges())
|
113 |
+
|
114 |
+
|
115 |
+
def test_intersection_multigraph_attributes():
|
116 |
+
g = nx.MultiGraph()
|
117 |
+
g.add_edge(0, 1, key=0)
|
118 |
+
g.add_edge(0, 1, key=1)
|
119 |
+
g.add_edge(0, 1, key=2)
|
120 |
+
h = nx.MultiGraph()
|
121 |
+
h.add_edge(0, 1, key=0)
|
122 |
+
h.add_edge(0, 1, key=3)
|
123 |
+
gh = nx.intersection(g, h)
|
124 |
+
assert set(gh.nodes()) == set(g.nodes())
|
125 |
+
assert set(gh.nodes()) == set(h.nodes())
|
126 |
+
assert sorted(gh.edges()) == [(0, 1)]
|
127 |
+
assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]
|
128 |
+
|
129 |
+
|
130 |
+
def test_intersection_multigraph_attributes_node_set_different():
|
131 |
+
g = nx.MultiGraph()
|
132 |
+
g.add_edge(0, 1, key=0)
|
133 |
+
g.add_edge(0, 1, key=1)
|
134 |
+
g.add_edge(0, 1, key=2)
|
135 |
+
g.add_edge(0, 2, key=2)
|
136 |
+
g.add_edge(0, 2, key=1)
|
137 |
+
h = nx.MultiGraph()
|
138 |
+
h.add_edge(0, 1, key=0)
|
139 |
+
h.add_edge(0, 1, key=3)
|
140 |
+
gh = nx.intersection(g, h)
|
141 |
+
assert set(gh.nodes()) == set(h.nodes())
|
142 |
+
assert sorted(gh.edges()) == [(0, 1)]
|
143 |
+
assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]
|
144 |
+
|
145 |
+
|
146 |
+
def test_difference():
|
147 |
+
G = nx.Graph()
|
148 |
+
H = nx.Graph()
|
149 |
+
G.add_nodes_from([1, 2, 3, 4])
|
150 |
+
G.add_edge(1, 2)
|
151 |
+
G.add_edge(2, 3)
|
152 |
+
H.add_nodes_from([1, 2, 3, 4])
|
153 |
+
H.add_edge(2, 3)
|
154 |
+
H.add_edge(3, 4)
|
155 |
+
D = nx.difference(G, H)
|
156 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
157 |
+
assert sorted(D.edges()) == [(1, 2)]
|
158 |
+
D = nx.difference(H, G)
|
159 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
160 |
+
assert sorted(D.edges()) == [(3, 4)]
|
161 |
+
D = nx.symmetric_difference(G, H)
|
162 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
163 |
+
assert sorted(D.edges()) == [(1, 2), (3, 4)]
|
164 |
+
|
165 |
+
|
166 |
+
def test_difference2():
|
167 |
+
G = nx.Graph()
|
168 |
+
H = nx.Graph()
|
169 |
+
G.add_nodes_from([1, 2, 3, 4])
|
170 |
+
H.add_nodes_from([1, 2, 3, 4])
|
171 |
+
G.add_edge(1, 2)
|
172 |
+
H.add_edge(1, 2)
|
173 |
+
G.add_edge(2, 3)
|
174 |
+
D = nx.difference(G, H)
|
175 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
176 |
+
assert sorted(D.edges()) == [(2, 3)]
|
177 |
+
D = nx.difference(H, G)
|
178 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
179 |
+
assert sorted(D.edges()) == []
|
180 |
+
H.add_edge(3, 4)
|
181 |
+
D = nx.difference(H, G)
|
182 |
+
assert set(D.nodes()) == {1, 2, 3, 4}
|
183 |
+
assert sorted(D.edges()) == [(3, 4)]
|
184 |
+
|
185 |
+
|
186 |
+
def test_difference_attributes():
|
187 |
+
g = nx.Graph()
|
188 |
+
g.add_node(0, x=4)
|
189 |
+
g.add_node(1, x=5)
|
190 |
+
g.add_edge(0, 1, size=5)
|
191 |
+
g.graph["name"] = "g"
|
192 |
+
|
193 |
+
h = g.copy()
|
194 |
+
h.graph["name"] = "h"
|
195 |
+
h.graph["attr"] = "attr"
|
196 |
+
h.nodes[0]["x"] = 7
|
197 |
+
|
198 |
+
gh = nx.difference(g, h)
|
199 |
+
assert set(gh.nodes()) == set(g.nodes())
|
200 |
+
assert set(gh.nodes()) == set(h.nodes())
|
201 |
+
assert sorted(gh.edges()) == []
|
202 |
+
# node and graph data should not be copied over
|
203 |
+
assert gh.nodes.data() != g.nodes.data()
|
204 |
+
assert gh.graph != g.graph
|
205 |
+
|
206 |
+
|
207 |
+
def test_difference_multigraph_attributes():
|
208 |
+
g = nx.MultiGraph()
|
209 |
+
g.add_edge(0, 1, key=0)
|
210 |
+
g.add_edge(0, 1, key=1)
|
211 |
+
g.add_edge(0, 1, key=2)
|
212 |
+
h = nx.MultiGraph()
|
213 |
+
h.add_edge(0, 1, key=0)
|
214 |
+
h.add_edge(0, 1, key=3)
|
215 |
+
gh = nx.difference(g, h)
|
216 |
+
assert set(gh.nodes()) == set(g.nodes())
|
217 |
+
assert set(gh.nodes()) == set(h.nodes())
|
218 |
+
assert sorted(gh.edges()) == [(0, 1), (0, 1)]
|
219 |
+
assert sorted(gh.edges(keys=True)) == [(0, 1, 1), (0, 1, 2)]
|
220 |
+
|
221 |
+
|
222 |
+
def test_difference_raise():
|
223 |
+
G = nx.path_graph(4)
|
224 |
+
H = nx.path_graph(3)
|
225 |
+
pytest.raises(nx.NetworkXError, nx.difference, G, H)
|
226 |
+
pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H)
|
227 |
+
|
228 |
+
|
229 |
+
def test_symmetric_difference_multigraph():
|
230 |
+
g = nx.MultiGraph()
|
231 |
+
g.add_edge(0, 1, key=0)
|
232 |
+
g.add_edge(0, 1, key=1)
|
233 |
+
g.add_edge(0, 1, key=2)
|
234 |
+
h = nx.MultiGraph()
|
235 |
+
h.add_edge(0, 1, key=0)
|
236 |
+
h.add_edge(0, 1, key=3)
|
237 |
+
gh = nx.symmetric_difference(g, h)
|
238 |
+
assert set(gh.nodes()) == set(g.nodes())
|
239 |
+
assert set(gh.nodes()) == set(h.nodes())
|
240 |
+
assert sorted(gh.edges()) == 3 * [(0, 1)]
|
241 |
+
assert sorted(sorted(e) for e in gh.edges(keys=True)) == [
|
242 |
+
[0, 1, 1],
|
243 |
+
[0, 1, 2],
|
244 |
+
[0, 1, 3],
|
245 |
+
]
|
246 |
+
|
247 |
+
|
248 |
+
def test_union_and_compose():
|
249 |
+
K3 = nx.complete_graph(3)
|
250 |
+
P3 = nx.path_graph(3)
|
251 |
+
|
252 |
+
G1 = nx.DiGraph()
|
253 |
+
G1.add_edge("A", "B")
|
254 |
+
G1.add_edge("A", "C")
|
255 |
+
G1.add_edge("A", "D")
|
256 |
+
G2 = nx.DiGraph()
|
257 |
+
G2.add_edge("1", "2")
|
258 |
+
G2.add_edge("1", "3")
|
259 |
+
G2.add_edge("1", "4")
|
260 |
+
|
261 |
+
G = nx.union(G1, G2)
|
262 |
+
H = nx.compose(G1, G2)
|
263 |
+
assert edges_equal(G.edges(), H.edges())
|
264 |
+
assert not G.has_edge("A", 1)
|
265 |
+
pytest.raises(nx.NetworkXError, nx.union, K3, P3)
|
266 |
+
H1 = nx.union(H, G1, rename=("H", "G1"))
|
267 |
+
assert sorted(H1.nodes()) == [
|
268 |
+
"G1A",
|
269 |
+
"G1B",
|
270 |
+
"G1C",
|
271 |
+
"G1D",
|
272 |
+
"H1",
|
273 |
+
"H2",
|
274 |
+
"H3",
|
275 |
+
"H4",
|
276 |
+
"HA",
|
277 |
+
"HB",
|
278 |
+
"HC",
|
279 |
+
"HD",
|
280 |
+
]
|
281 |
+
|
282 |
+
H2 = nx.union(H, G2, rename=("H", ""))
|
283 |
+
assert sorted(H2.nodes()) == [
|
284 |
+
"1",
|
285 |
+
"2",
|
286 |
+
"3",
|
287 |
+
"4",
|
288 |
+
"H1",
|
289 |
+
"H2",
|
290 |
+
"H3",
|
291 |
+
"H4",
|
292 |
+
"HA",
|
293 |
+
"HB",
|
294 |
+
"HC",
|
295 |
+
"HD",
|
296 |
+
]
|
297 |
+
|
298 |
+
assert not H1.has_edge("NB", "NA")
|
299 |
+
|
300 |
+
G = nx.compose(G, G)
|
301 |
+
assert edges_equal(G.edges(), H.edges())
|
302 |
+
|
303 |
+
G2 = nx.union(G2, G2, rename=("", "copy"))
|
304 |
+
assert sorted(G2.nodes()) == [
|
305 |
+
"1",
|
306 |
+
"2",
|
307 |
+
"3",
|
308 |
+
"4",
|
309 |
+
"copy1",
|
310 |
+
"copy2",
|
311 |
+
"copy3",
|
312 |
+
"copy4",
|
313 |
+
]
|
314 |
+
|
315 |
+
assert sorted(G2.neighbors("copy4")) == []
|
316 |
+
assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"]
|
317 |
+
assert len(G) == 8
|
318 |
+
assert nx.number_of_edges(G) == 6
|
319 |
+
|
320 |
+
E = nx.disjoint_union(G, G)
|
321 |
+
assert len(E) == 16
|
322 |
+
assert nx.number_of_edges(E) == 12
|
323 |
+
|
324 |
+
E = nx.disjoint_union(G1, G2)
|
325 |
+
assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
|
326 |
+
|
327 |
+
G = nx.Graph()
|
328 |
+
H = nx.Graph()
|
329 |
+
G.add_nodes_from([(1, {"a1": 1})])
|
330 |
+
H.add_nodes_from([(1, {"b1": 1})])
|
331 |
+
R = nx.compose(G, H)
|
332 |
+
assert R.nodes == {1: {"a1": 1, "b1": 1}}
|
333 |
+
|
334 |
+
|
335 |
+
def test_union_multigraph():
|
336 |
+
G = nx.MultiGraph()
|
337 |
+
G.add_edge(1, 2, key=0)
|
338 |
+
G.add_edge(1, 2, key=1)
|
339 |
+
H = nx.MultiGraph()
|
340 |
+
H.add_edge(3, 4, key=0)
|
341 |
+
H.add_edge(3, 4, key=1)
|
342 |
+
GH = nx.union(G, H)
|
343 |
+
assert set(GH) == set(G) | set(H)
|
344 |
+
assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
|
345 |
+
|
346 |
+
|
347 |
+
def test_disjoint_union_multigraph():
|
348 |
+
G = nx.MultiGraph()
|
349 |
+
G.add_edge(0, 1, key=0)
|
350 |
+
G.add_edge(0, 1, key=1)
|
351 |
+
H = nx.MultiGraph()
|
352 |
+
H.add_edge(2, 3, key=0)
|
353 |
+
H.add_edge(2, 3, key=1)
|
354 |
+
GH = nx.disjoint_union(G, H)
|
355 |
+
assert set(GH) == set(G) | set(H)
|
356 |
+
assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
|
357 |
+
|
358 |
+
|
359 |
+
def test_compose_multigraph():
|
360 |
+
G = nx.MultiGraph()
|
361 |
+
G.add_edge(1, 2, key=0)
|
362 |
+
G.add_edge(1, 2, key=1)
|
363 |
+
H = nx.MultiGraph()
|
364 |
+
H.add_edge(3, 4, key=0)
|
365 |
+
H.add_edge(3, 4, key=1)
|
366 |
+
GH = nx.compose(G, H)
|
367 |
+
assert set(GH) == set(G) | set(H)
|
368 |
+
assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
|
369 |
+
H.add_edge(1, 2, key=2)
|
370 |
+
GH = nx.compose(G, H)
|
371 |
+
assert set(GH) == set(G) | set(H)
|
372 |
+
assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
|
373 |
+
|
374 |
+
|
375 |
+
def test_full_join_graph():
|
376 |
+
# Simple Graphs
|
377 |
+
G = nx.Graph()
|
378 |
+
G.add_node(0)
|
379 |
+
G.add_edge(1, 2)
|
380 |
+
H = nx.Graph()
|
381 |
+
H.add_edge(3, 4)
|
382 |
+
|
383 |
+
U = nx.full_join(G, H)
|
384 |
+
assert set(U) == set(G) | set(H)
|
385 |
+
assert len(U) == len(G) + len(H)
|
386 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)
|
387 |
+
|
388 |
+
# Rename
|
389 |
+
U = nx.full_join(G, H, rename=("g", "h"))
|
390 |
+
assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
|
391 |
+
assert len(U) == len(G) + len(H)
|
392 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)
|
393 |
+
|
394 |
+
# Rename graphs with string-like nodes
|
395 |
+
G = nx.Graph()
|
396 |
+
G.add_node("a")
|
397 |
+
G.add_edge("b", "c")
|
398 |
+
H = nx.Graph()
|
399 |
+
H.add_edge("d", "e")
|
400 |
+
|
401 |
+
U = nx.full_join(G, H, rename=("g", "h"))
|
402 |
+
assert set(U) == {"ga", "gb", "gc", "hd", "he"}
|
403 |
+
assert len(U) == len(G) + len(H)
|
404 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)
|
405 |
+
|
406 |
+
# DiGraphs
|
407 |
+
G = nx.DiGraph()
|
408 |
+
G.add_node(0)
|
409 |
+
G.add_edge(1, 2)
|
410 |
+
H = nx.DiGraph()
|
411 |
+
H.add_edge(3, 4)
|
412 |
+
|
413 |
+
U = nx.full_join(G, H)
|
414 |
+
assert set(U) == set(G) | set(H)
|
415 |
+
assert len(U) == len(G) + len(H)
|
416 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
|
417 |
+
|
418 |
+
# DiGraphs Rename
|
419 |
+
U = nx.full_join(G, H, rename=("g", "h"))
|
420 |
+
assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
|
421 |
+
assert len(U) == len(G) + len(H)
|
422 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
|
423 |
+
|
424 |
+
|
425 |
+
def test_full_join_multigraph():
|
426 |
+
# MultiGraphs
|
427 |
+
G = nx.MultiGraph()
|
428 |
+
G.add_node(0)
|
429 |
+
G.add_edge(1, 2)
|
430 |
+
H = nx.MultiGraph()
|
431 |
+
H.add_edge(3, 4)
|
432 |
+
|
433 |
+
U = nx.full_join(G, H)
|
434 |
+
assert set(U) == set(G) | set(H)
|
435 |
+
assert len(U) == len(G) + len(H)
|
436 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)
|
437 |
+
|
438 |
+
# MultiGraphs rename
|
439 |
+
U = nx.full_join(G, H, rename=("g", "h"))
|
440 |
+
assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
|
441 |
+
assert len(U) == len(G) + len(H)
|
442 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)
|
443 |
+
|
444 |
+
# MultiDiGraphs
|
445 |
+
G = nx.MultiDiGraph()
|
446 |
+
G.add_node(0)
|
447 |
+
G.add_edge(1, 2)
|
448 |
+
H = nx.MultiDiGraph()
|
449 |
+
H.add_edge(3, 4)
|
450 |
+
|
451 |
+
U = nx.full_join(G, H)
|
452 |
+
assert set(U) == set(G) | set(H)
|
453 |
+
assert len(U) == len(G) + len(H)
|
454 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
|
455 |
+
|
456 |
+
# MultiDiGraphs rename
|
457 |
+
U = nx.full_join(G, H, rename=("g", "h"))
|
458 |
+
assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
|
459 |
+
assert len(U) == len(G) + len(H)
|
460 |
+
assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
|
461 |
+
|
462 |
+
|
463 |
+
def test_mixed_type_union():
|
464 |
+
G = nx.Graph()
|
465 |
+
H = nx.MultiGraph()
|
466 |
+
pytest.raises(nx.NetworkXError, nx.union, G, H)
|
467 |
+
pytest.raises(nx.NetworkXError, nx.disjoint_union, G, H)
|
468 |
+
pytest.raises(nx.NetworkXError, nx.intersection, G, H)
|
469 |
+
pytest.raises(nx.NetworkXError, nx.difference, G, H)
|
470 |
+
pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H)
|
471 |
+
pytest.raises(nx.NetworkXError, nx.compose, G, H)
|
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_product.py
ADDED
@@ -0,0 +1,491 @@
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
from networkx.utils import edges_equal
|
5 |
+
|
6 |
+
|
7 |
+
def test_tensor_product_raises():
|
8 |
+
with pytest.raises(nx.NetworkXError):
|
9 |
+
P = nx.tensor_product(nx.DiGraph(), nx.Graph())
|
10 |
+
|
11 |
+
|
12 |
+
def test_tensor_product_null():
|
13 |
+
null = nx.null_graph()
|
14 |
+
empty10 = nx.empty_graph(10)
|
15 |
+
K3 = nx.complete_graph(3)
|
16 |
+
K10 = nx.complete_graph(10)
|
17 |
+
P3 = nx.path_graph(3)
|
18 |
+
P10 = nx.path_graph(10)
|
19 |
+
# null graph
|
20 |
+
G = nx.tensor_product(null, null)
|
21 |
+
assert nx.is_isomorphic(G, null)
|
22 |
+
# null_graph X anything = null_graph and v.v.
|
23 |
+
G = nx.tensor_product(null, empty10)
|
24 |
+
assert nx.is_isomorphic(G, null)
|
25 |
+
G = nx.tensor_product(null, K3)
|
26 |
+
assert nx.is_isomorphic(G, null)
|
27 |
+
G = nx.tensor_product(null, K10)
|
28 |
+
assert nx.is_isomorphic(G, null)
|
29 |
+
G = nx.tensor_product(null, P3)
|
30 |
+
assert nx.is_isomorphic(G, null)
|
31 |
+
G = nx.tensor_product(null, P10)
|
32 |
+
assert nx.is_isomorphic(G, null)
|
33 |
+
G = nx.tensor_product(empty10, null)
|
34 |
+
assert nx.is_isomorphic(G, null)
|
35 |
+
G = nx.tensor_product(K3, null)
|
36 |
+
assert nx.is_isomorphic(G, null)
|
37 |
+
G = nx.tensor_product(K10, null)
|
38 |
+
assert nx.is_isomorphic(G, null)
|
39 |
+
G = nx.tensor_product(P3, null)
|
40 |
+
assert nx.is_isomorphic(G, null)
|
41 |
+
G = nx.tensor_product(P10, null)
|
42 |
+
assert nx.is_isomorphic(G, null)
|
43 |
+
|
44 |
+
|
45 |
+
def test_tensor_product_size():
|
46 |
+
P5 = nx.path_graph(5)
|
47 |
+
K3 = nx.complete_graph(3)
|
48 |
+
K5 = nx.complete_graph(5)
|
49 |
+
|
50 |
+
G = nx.tensor_product(P5, K3)
|
51 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
52 |
+
G = nx.tensor_product(K3, K5)
|
53 |
+
assert nx.number_of_nodes(G) == 3 * 5
|
54 |
+
|
55 |
+
|
56 |
+
def test_tensor_product_combinations():
|
57 |
+
# basic smoke test, more realistic tests would be useful
|
58 |
+
P5 = nx.path_graph(5)
|
59 |
+
K3 = nx.complete_graph(3)
|
60 |
+
G = nx.tensor_product(P5, K3)
|
61 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
62 |
+
G = nx.tensor_product(P5, nx.MultiGraph(K3))
|
63 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
64 |
+
G = nx.tensor_product(nx.MultiGraph(P5), K3)
|
65 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
66 |
+
G = nx.tensor_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
|
67 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
68 |
+
|
69 |
+
G = nx.tensor_product(nx.DiGraph(P5), nx.DiGraph(K3))
|
70 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
71 |
+
|
72 |
+
|
73 |
+
def test_tensor_product_classic_result():
|
74 |
+
K2 = nx.complete_graph(2)
|
75 |
+
G = nx.petersen_graph()
|
76 |
+
G = nx.tensor_product(G, K2)
|
77 |
+
assert nx.is_isomorphic(G, nx.desargues_graph())
|
78 |
+
|
79 |
+
G = nx.cycle_graph(5)
|
80 |
+
G = nx.tensor_product(G, K2)
|
81 |
+
assert nx.is_isomorphic(G, nx.cycle_graph(10))
|
82 |
+
|
83 |
+
G = nx.tetrahedral_graph()
|
84 |
+
G = nx.tensor_product(G, K2)
|
85 |
+
assert nx.is_isomorphic(G, nx.cubical_graph())
|
86 |
+
|
87 |
+
|
88 |
+
def test_tensor_product_random():
|
89 |
+
G = nx.erdos_renyi_graph(10, 2 / 10.0)
|
90 |
+
H = nx.erdos_renyi_graph(10, 2 / 10.0)
|
91 |
+
GH = nx.tensor_product(G, H)
|
92 |
+
|
93 |
+
for u_G, u_H in GH.nodes():
|
94 |
+
for v_G, v_H in GH.nodes():
|
95 |
+
if H.has_edge(u_H, v_H) and G.has_edge(u_G, v_G):
|
96 |
+
assert GH.has_edge((u_G, u_H), (v_G, v_H))
|
97 |
+
else:
|
98 |
+
assert not GH.has_edge((u_G, u_H), (v_G, v_H))
|
99 |
+
|
100 |
+
|
101 |
+
def test_cartesian_product_multigraph():
|
102 |
+
G = nx.MultiGraph()
|
103 |
+
G.add_edge(1, 2, key=0)
|
104 |
+
G.add_edge(1, 2, key=1)
|
105 |
+
H = nx.MultiGraph()
|
106 |
+
H.add_edge(3, 4, key=0)
|
107 |
+
H.add_edge(3, 4, key=1)
|
108 |
+
GH = nx.cartesian_product(G, H)
|
109 |
+
assert set(GH) == {(1, 3), (2, 3), (2, 4), (1, 4)}
|
110 |
+
assert {(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == {
|
111 |
+
(frozenset([u, v]), k)
|
112 |
+
for u, v, k in [
|
113 |
+
((1, 3), (2, 3), 0),
|
114 |
+
((1, 3), (2, 3), 1),
|
115 |
+
((1, 3), (1, 4), 0),
|
116 |
+
((1, 3), (1, 4), 1),
|
117 |
+
((2, 3), (2, 4), 0),
|
118 |
+
((2, 3), (2, 4), 1),
|
119 |
+
((2, 4), (1, 4), 0),
|
120 |
+
((2, 4), (1, 4), 1),
|
121 |
+
]
|
122 |
+
}
|
123 |
+
|
124 |
+
|
125 |
+
def test_cartesian_product_raises():
|
126 |
+
with pytest.raises(nx.NetworkXError):
|
127 |
+
P = nx.cartesian_product(nx.DiGraph(), nx.Graph())
|
128 |
+
|
129 |
+
|
130 |
+
def test_cartesian_product_null():
|
131 |
+
null = nx.null_graph()
|
132 |
+
empty10 = nx.empty_graph(10)
|
133 |
+
K3 = nx.complete_graph(3)
|
134 |
+
K10 = nx.complete_graph(10)
|
135 |
+
P3 = nx.path_graph(3)
|
136 |
+
P10 = nx.path_graph(10)
|
137 |
+
# null graph
|
138 |
+
G = nx.cartesian_product(null, null)
|
139 |
+
assert nx.is_isomorphic(G, null)
|
140 |
+
# null_graph X anything = null_graph and v.v.
|
141 |
+
G = nx.cartesian_product(null, empty10)
|
142 |
+
assert nx.is_isomorphic(G, null)
|
143 |
+
G = nx.cartesian_product(null, K3)
|
144 |
+
assert nx.is_isomorphic(G, null)
|
145 |
+
G = nx.cartesian_product(null, K10)
|
146 |
+
assert nx.is_isomorphic(G, null)
|
147 |
+
G = nx.cartesian_product(null, P3)
|
148 |
+
assert nx.is_isomorphic(G, null)
|
149 |
+
G = nx.cartesian_product(null, P10)
|
150 |
+
assert nx.is_isomorphic(G, null)
|
151 |
+
G = nx.cartesian_product(empty10, null)
|
152 |
+
assert nx.is_isomorphic(G, null)
|
153 |
+
G = nx.cartesian_product(K3, null)
|
154 |
+
assert nx.is_isomorphic(G, null)
|
155 |
+
G = nx.cartesian_product(K10, null)
|
156 |
+
assert nx.is_isomorphic(G, null)
|
157 |
+
G = nx.cartesian_product(P3, null)
|
158 |
+
assert nx.is_isomorphic(G, null)
|
159 |
+
G = nx.cartesian_product(P10, null)
|
160 |
+
assert nx.is_isomorphic(G, null)
|
161 |
+
|
162 |
+
|
163 |
+
def test_cartesian_product_size():
|
164 |
+
# order(GXH)=order(G)*order(H)
|
165 |
+
K5 = nx.complete_graph(5)
|
166 |
+
P5 = nx.path_graph(5)
|
167 |
+
K3 = nx.complete_graph(3)
|
168 |
+
G = nx.cartesian_product(P5, K3)
|
169 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
170 |
+
assert nx.number_of_edges(G) == nx.number_of_edges(P5) * nx.number_of_nodes(
|
171 |
+
K3
|
172 |
+
) + nx.number_of_edges(K3) * nx.number_of_nodes(P5)
|
173 |
+
G = nx.cartesian_product(K3, K5)
|
174 |
+
assert nx.number_of_nodes(G) == 3 * 5
|
175 |
+
assert nx.number_of_edges(G) == nx.number_of_edges(K5) * nx.number_of_nodes(
|
176 |
+
K3
|
177 |
+
) + nx.number_of_edges(K3) * nx.number_of_nodes(K5)
|
178 |
+
|
179 |
+
|
180 |
+
def test_cartesian_product_classic():
|
181 |
+
# test some classic product graphs
|
182 |
+
P2 = nx.path_graph(2)
|
183 |
+
P3 = nx.path_graph(3)
|
184 |
+
# cube = 2-path X 2-path
|
185 |
+
G = nx.cartesian_product(P2, P2)
|
186 |
+
G = nx.cartesian_product(P2, G)
|
187 |
+
assert nx.is_isomorphic(G, nx.cubical_graph())
|
188 |
+
|
189 |
+
# 3x3 grid
|
190 |
+
G = nx.cartesian_product(P3, P3)
|
191 |
+
assert nx.is_isomorphic(G, nx.grid_2d_graph(3, 3))
|
192 |
+
|
193 |
+
|
194 |
+
def test_cartesian_product_random():
|
195 |
+
G = nx.erdos_renyi_graph(10, 2 / 10.0)
|
196 |
+
H = nx.erdos_renyi_graph(10, 2 / 10.0)
|
197 |
+
GH = nx.cartesian_product(G, H)
|
198 |
+
|
199 |
+
for u_G, u_H in GH.nodes():
|
200 |
+
for v_G, v_H in GH.nodes():
|
201 |
+
if (u_G == v_G and H.has_edge(u_H, v_H)) or (
|
202 |
+
u_H == v_H and G.has_edge(u_G, v_G)
|
203 |
+
):
|
204 |
+
assert GH.has_edge((u_G, u_H), (v_G, v_H))
|
205 |
+
else:
|
206 |
+
assert not GH.has_edge((u_G, u_H), (v_G, v_H))
|
207 |
+
|
208 |
+
|
209 |
+
def test_lexicographic_product_raises():
|
210 |
+
with pytest.raises(nx.NetworkXError):
|
211 |
+
P = nx.lexicographic_product(nx.DiGraph(), nx.Graph())
|
212 |
+
|
213 |
+
|
214 |
+
def test_lexicographic_product_null():
|
215 |
+
null = nx.null_graph()
|
216 |
+
empty10 = nx.empty_graph(10)
|
217 |
+
K3 = nx.complete_graph(3)
|
218 |
+
K10 = nx.complete_graph(10)
|
219 |
+
P3 = nx.path_graph(3)
|
220 |
+
P10 = nx.path_graph(10)
|
221 |
+
# null graph
|
222 |
+
G = nx.lexicographic_product(null, null)
|
223 |
+
assert nx.is_isomorphic(G, null)
|
224 |
+
# null_graph X anything = null_graph and v.v.
|
225 |
+
G = nx.lexicographic_product(null, empty10)
|
226 |
+
assert nx.is_isomorphic(G, null)
|
227 |
+
G = nx.lexicographic_product(null, K3)
|
228 |
+
assert nx.is_isomorphic(G, null)
|
229 |
+
G = nx.lexicographic_product(null, K10)
|
230 |
+
assert nx.is_isomorphic(G, null)
|
231 |
+
G = nx.lexicographic_product(null, P3)
|
232 |
+
assert nx.is_isomorphic(G, null)
|
233 |
+
G = nx.lexicographic_product(null, P10)
|
234 |
+
assert nx.is_isomorphic(G, null)
|
235 |
+
G = nx.lexicographic_product(empty10, null)
|
236 |
+
assert nx.is_isomorphic(G, null)
|
237 |
+
G = nx.lexicographic_product(K3, null)
|
238 |
+
assert nx.is_isomorphic(G, null)
|
239 |
+
G = nx.lexicographic_product(K10, null)
|
240 |
+
assert nx.is_isomorphic(G, null)
|
241 |
+
G = nx.lexicographic_product(P3, null)
|
242 |
+
assert nx.is_isomorphic(G, null)
|
243 |
+
G = nx.lexicographic_product(P10, null)
|
244 |
+
assert nx.is_isomorphic(G, null)
|
245 |
+
|
246 |
+
|
247 |
+
def test_lexicographic_product_size():
|
248 |
+
K5 = nx.complete_graph(5)
|
249 |
+
P5 = nx.path_graph(5)
|
250 |
+
K3 = nx.complete_graph(3)
|
251 |
+
G = nx.lexicographic_product(P5, K3)
|
252 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
253 |
+
G = nx.lexicographic_product(K3, K5)
|
254 |
+
assert nx.number_of_nodes(G) == 3 * 5
|
255 |
+
|
256 |
+
|
257 |
+
def test_lexicographic_product_combinations():
|
258 |
+
P5 = nx.path_graph(5)
|
259 |
+
K3 = nx.complete_graph(3)
|
260 |
+
G = nx.lexicographic_product(P5, K3)
|
261 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
262 |
+
G = nx.lexicographic_product(nx.MultiGraph(P5), K3)
|
263 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
264 |
+
G = nx.lexicographic_product(P5, nx.MultiGraph(K3))
|
265 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
266 |
+
G = nx.lexicographic_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
|
267 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
268 |
+
|
269 |
+
# No classic easily found classic results for lexicographic product
|
270 |
+
|
271 |
+
|
272 |
+
def test_lexicographic_product_random():
|
273 |
+
G = nx.erdos_renyi_graph(10, 2 / 10.0)
|
274 |
+
H = nx.erdos_renyi_graph(10, 2 / 10.0)
|
275 |
+
GH = nx.lexicographic_product(G, H)
|
276 |
+
|
277 |
+
for u_G, u_H in GH.nodes():
|
278 |
+
for v_G, v_H in GH.nodes():
|
279 |
+
if G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H)):
|
280 |
+
assert GH.has_edge((u_G, u_H), (v_G, v_H))
|
281 |
+
else:
|
282 |
+
assert not GH.has_edge((u_G, u_H), (v_G, v_H))
|
283 |
+
|
284 |
+
|
285 |
+
def test_strong_product_raises():
|
286 |
+
with pytest.raises(nx.NetworkXError):
|
287 |
+
P = nx.strong_product(nx.DiGraph(), nx.Graph())
|
288 |
+
|
289 |
+
|
290 |
+
def test_strong_product_null():
|
291 |
+
null = nx.null_graph()
|
292 |
+
empty10 = nx.empty_graph(10)
|
293 |
+
K3 = nx.complete_graph(3)
|
294 |
+
K10 = nx.complete_graph(10)
|
295 |
+
P3 = nx.path_graph(3)
|
296 |
+
P10 = nx.path_graph(10)
|
297 |
+
# null graph
|
298 |
+
G = nx.strong_product(null, null)
|
299 |
+
assert nx.is_isomorphic(G, null)
|
300 |
+
# null_graph X anything = null_graph and v.v.
|
301 |
+
G = nx.strong_product(null, empty10)
|
302 |
+
assert nx.is_isomorphic(G, null)
|
303 |
+
G = nx.strong_product(null, K3)
|
304 |
+
assert nx.is_isomorphic(G, null)
|
305 |
+
G = nx.strong_product(null, K10)
|
306 |
+
assert nx.is_isomorphic(G, null)
|
307 |
+
G = nx.strong_product(null, P3)
|
308 |
+
assert nx.is_isomorphic(G, null)
|
309 |
+
G = nx.strong_product(null, P10)
|
310 |
+
assert nx.is_isomorphic(G, null)
|
311 |
+
G = nx.strong_product(empty10, null)
|
312 |
+
assert nx.is_isomorphic(G, null)
|
313 |
+
G = nx.strong_product(K3, null)
|
314 |
+
assert nx.is_isomorphic(G, null)
|
315 |
+
G = nx.strong_product(K10, null)
|
316 |
+
assert nx.is_isomorphic(G, null)
|
317 |
+
G = nx.strong_product(P3, null)
|
318 |
+
assert nx.is_isomorphic(G, null)
|
319 |
+
G = nx.strong_product(P10, null)
|
320 |
+
assert nx.is_isomorphic(G, null)
|
321 |
+
|
322 |
+
|
323 |
+
def test_strong_product_size():
|
324 |
+
K5 = nx.complete_graph(5)
|
325 |
+
P5 = nx.path_graph(5)
|
326 |
+
K3 = nx.complete_graph(3)
|
327 |
+
G = nx.strong_product(P5, K3)
|
328 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
329 |
+
G = nx.strong_product(K3, K5)
|
330 |
+
assert nx.number_of_nodes(G) == 3 * 5
|
331 |
+
|
332 |
+
|
333 |
+
def test_strong_product_combinations():
|
334 |
+
P5 = nx.path_graph(5)
|
335 |
+
K3 = nx.complete_graph(3)
|
336 |
+
G = nx.strong_product(P5, K3)
|
337 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
338 |
+
G = nx.strong_product(nx.MultiGraph(P5), K3)
|
339 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
340 |
+
G = nx.strong_product(P5, nx.MultiGraph(K3))
|
341 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
342 |
+
G = nx.strong_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
|
343 |
+
assert nx.number_of_nodes(G) == 5 * 3
|
344 |
+
|
345 |
+
# No classic easily found classic results for strong product
|
346 |
+
|
347 |
+
|
348 |
+
def test_strong_product_random():
|
349 |
+
G = nx.erdos_renyi_graph(10, 2 / 10.0)
|
350 |
+
H = nx.erdos_renyi_graph(10, 2 / 10.0)
|
351 |
+
GH = nx.strong_product(G, H)
|
352 |
+
|
353 |
+
for u_G, u_H in GH.nodes():
|
354 |
+
for v_G, v_H in GH.nodes():
|
355 |
+
if (
|
356 |
+
(u_G == v_G and H.has_edge(u_H, v_H))
|
357 |
+
or (u_H == v_H and G.has_edge(u_G, v_G))
|
358 |
+
or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H))
|
359 |
+
):
|
360 |
+
assert GH.has_edge((u_G, u_H), (v_G, v_H))
|
361 |
+
else:
|
362 |
+
assert not GH.has_edge((u_G, u_H), (v_G, v_H))
|
363 |
+
|
364 |
+
|
365 |
+
def test_graph_power_raises():
|
366 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
367 |
+
nx.power(nx.MultiDiGraph(), 2)
|
368 |
+
|
369 |
+
|
370 |
+
def test_graph_power():
|
371 |
+
# wikipedia example for graph power
|
372 |
+
G = nx.cycle_graph(7)
|
373 |
+
G.add_edge(6, 7)
|
374 |
+
G.add_edge(7, 8)
|
375 |
+
G.add_edge(8, 9)
|
376 |
+
G.add_edge(9, 2)
|
377 |
+
H = nx.power(G, 2)
|
378 |
+
|
379 |
+
assert edges_equal(
|
380 |
+
list(H.edges()),
|
381 |
+
[
|
382 |
+
(0, 1),
|
383 |
+
(0, 2),
|
384 |
+
(0, 5),
|
385 |
+
(0, 6),
|
386 |
+
(0, 7),
|
387 |
+
(1, 9),
|
388 |
+
(1, 2),
|
389 |
+
(1, 3),
|
390 |
+
(1, 6),
|
391 |
+
(2, 3),
|
392 |
+
(2, 4),
|
393 |
+
(2, 8),
|
394 |
+
(2, 9),
|
395 |
+
(3, 4),
|
396 |
+
(3, 5),
|
397 |
+
(3, 9),
|
398 |
+
(4, 5),
|
399 |
+
(4, 6),
|
400 |
+
(5, 6),
|
401 |
+
(5, 7),
|
402 |
+
(6, 7),
|
403 |
+
(6, 8),
|
404 |
+
(7, 8),
|
405 |
+
(7, 9),
|
406 |
+
(8, 9),
|
407 |
+
],
|
408 |
+
)
|
409 |
+
|
410 |
+
|
411 |
+
def test_graph_power_negative():
|
412 |
+
with pytest.raises(ValueError):
|
413 |
+
nx.power(nx.Graph(), -1)
|
414 |
+
|
415 |
+
|
416 |
+
def test_rooted_product_raises():
|
417 |
+
with pytest.raises(nx.NetworkXError):
|
418 |
+
nx.rooted_product(nx.Graph(), nx.path_graph(2), 10)
|
419 |
+
|
420 |
+
|
421 |
+
def test_rooted_product():
|
422 |
+
G = nx.cycle_graph(5)
|
423 |
+
H = nx.Graph()
|
424 |
+
H.add_edges_from([("a", "b"), ("b", "c"), ("b", "d")])
|
425 |
+
R = nx.rooted_product(G, H, "a")
|
426 |
+
assert len(R) == len(G) * len(H)
|
427 |
+
assert R.size() == G.size() + len(G) * H.size()
|
428 |
+
|
429 |
+
|
430 |
+
def test_corona_product():
|
431 |
+
G = nx.cycle_graph(3)
|
432 |
+
H = nx.path_graph(2)
|
433 |
+
C = nx.corona_product(G, H)
|
434 |
+
assert len(C) == (len(G) * len(H)) + len(G)
|
435 |
+
assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H)
|
436 |
+
|
437 |
+
|
438 |
+
def test_modular_product():
|
439 |
+
G = nx.path_graph(3)
|
440 |
+
H = nx.path_graph(4)
|
441 |
+
M = nx.modular_product(G, H)
|
442 |
+
assert len(M) == len(G) * len(H)
|
443 |
+
|
444 |
+
assert edges_equal(
|
445 |
+
list(M.edges()),
|
446 |
+
[
|
447 |
+
((0, 0), (1, 1)),
|
448 |
+
((0, 0), (2, 2)),
|
449 |
+
((0, 0), (2, 3)),
|
450 |
+
((0, 1), (1, 0)),
|
451 |
+
((0, 1), (1, 2)),
|
452 |
+
((0, 1), (2, 3)),
|
453 |
+
((0, 2), (1, 1)),
|
454 |
+
((0, 2), (1, 3)),
|
455 |
+
((0, 2), (2, 0)),
|
456 |
+
((0, 3), (1, 2)),
|
457 |
+
((0, 3), (2, 0)),
|
458 |
+
((0, 3), (2, 1)),
|
459 |
+
((1, 0), (2, 1)),
|
460 |
+
((1, 1), (2, 0)),
|
461 |
+
((1, 1), (2, 2)),
|
462 |
+
((1, 2), (2, 1)),
|
463 |
+
((1, 2), (2, 3)),
|
464 |
+
((1, 3), (2, 2)),
|
465 |
+
],
|
466 |
+
)
|
467 |
+
|
468 |
+
|
469 |
+
def test_modular_product_raises():
|
470 |
+
G = nx.Graph([(0, 1), (1, 2), (2, 0)])
|
471 |
+
H = nx.Graph([(0, 1), (1, 2), (2, 0)])
|
472 |
+
DG = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
|
473 |
+
DH = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
|
474 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
475 |
+
nx.modular_product(G, DH)
|
476 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
477 |
+
nx.modular_product(DG, H)
|
478 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
479 |
+
nx.modular_product(DG, DH)
|
480 |
+
|
481 |
+
MG = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)])
|
482 |
+
MH = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)])
|
483 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
484 |
+
nx.modular_product(G, MH)
|
485 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
486 |
+
nx.modular_product(MG, H)
|
487 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
488 |
+
nx.modular_product(MG, MH)
|
489 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
490 |
+
# check multigraph with no multiedges
|
491 |
+
nx.modular_product(nx.MultiGraph(G), H)
|
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/tests/test_unary.py
ADDED
@@ -0,0 +1,55 @@
import pytest

import networkx as nx


def test_complement():
    null = nx.null_graph()
    empty1 = nx.empty_graph(1)
    empty10 = nx.empty_graph(10)
    K3 = nx.complete_graph(3)
    K5 = nx.complete_graph(5)
    K10 = nx.complete_graph(10)
    P2 = nx.path_graph(2)
    P3 = nx.path_graph(3)
    P5 = nx.path_graph(5)
    P10 = nx.path_graph(10)
    # complement of the complete graph is empty

    G = nx.complement(K3)
    assert nx.is_isomorphic(G, nx.empty_graph(3))
    G = nx.complement(K5)
    assert nx.is_isomorphic(G, nx.empty_graph(5))
    # for any G, G=complement(complement(G))
    P3cc = nx.complement(nx.complement(P3))
    assert nx.is_isomorphic(P3, P3cc)
    nullcc = nx.complement(nx.complement(null))
    assert nx.is_isomorphic(null, nullcc)
    b = nx.bull_graph()
    bcc = nx.complement(nx.complement(b))
    assert nx.is_isomorphic(b, bcc)


def test_complement_2():
    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G1.add_edge("A", "C")
    G1.add_edge("A", "D")
    G1C = nx.complement(G1)
    assert sorted(G1C.edges()) == [
        ("B", "A"),
        ("B", "C"),
        ("B", "D"),
        ("C", "A"),
        ("C", "B"),
        ("C", "D"),
        ("D", "A"),
        ("D", "B"),
        ("D", "C"),
    ]


def test_reverse1():
    # Other tests for reverse are done by the DiGraph and MultiDigraph.
    G1 = nx.Graph()
    pytest.raises(nx.NetworkXError, nx.reverse, G1)
env-llmeval/lib/python3.10/site-packages/networkx/algorithms/operators/unary.py
ADDED
@@ -0,0 +1,76 @@
"""Unary operations on graphs"""
import networkx as nx

__all__ = ["complement", "reverse"]


@nx._dispatchable(returns_graph=True)
def complement(G):
    """Returns the graph complement of G.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    Returns
    -------
    GC : A new graph.

    Notes
    -----
    Note that `complement` does not create self-loops and also
    does not produce parallel edges for MultiGraphs.

    Graph, node, and edge data are not propagated to the new graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
    >>> G_complement = nx.complement(G)
    >>> G_complement.edges()  # This shows the edges of the complemented graph
    EdgeView([(1, 4), (1, 5), (2, 4), (2, 5), (4, 5)])

    """
    R = G.__class__()
    R.add_nodes_from(G)
    R.add_edges_from(
        ((n, n2) for n, nbrs in G.adjacency() for n2 in G if n2 not in nbrs if n != n2)
    )
    return R


@nx._dispatchable(returns_graph=True)
def reverse(G, copy=True):
    """Returns the reverse directed graph of G.

    Parameters
    ----------
    G : directed graph
        A NetworkX directed graph
    copy : bool
        If True, then a new graph is returned. If False, then the graph is
        reversed in place.

    Returns
    -------
    H : directed graph
        The reversed G.

    Raises
    ------
    NetworkXError
        If graph is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
    >>> G_reversed = nx.reverse(G)
    >>> G_reversed.edges()
    OutEdgeView([(2, 1), (3, 1), (3, 2), (4, 3), (5, 3)])

    """
    if not G.is_directed():
        raise nx.NetworkXError("Cannot reverse an undirected graph.")
    else:
        return G.reverse(copy=copy)
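A short usage sketch for the two unary operators in this file; the example graphs are the ones from the docstrings above, and the expected-output comments follow directly from them.

import networkx as nx

G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
GC = nx.complement(G)
# Only the "missing" edges appear; graph, node and edge data are not copied.
print(sorted(GC.edges()))  # [(1, 4), (1, 5), (2, 4), (2, 5), (4, 5)]

D = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
R = nx.reverse(D)
print(sorted(R.edges()))  # [(2, 1), (3, 1), (3, 2)]

# reverse() rejects undirected input, as the Raises section states.
try:
    nx.reverse(G)
except nx.NetworkXError:
    pass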