Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- ckpts/universal/global_step80/zero/1.word_embeddings.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step80/zero/3.attention.dense.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step80/zero/3.attention.dense.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step80/zero/3.attention.dense.weight/fp32.pt +3 -0
- ckpts/universal/global_step80/zero/5.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step80/zero/5.mlp.dense_h_to_4h_swiglu.weight/fp32.pt +3 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__init__.py +25 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/asyn_fluid.py +151 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/centrality.py +171 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/community_utils.py +29 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/divisive.py +196 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/kclique.py +79 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/kernighan_lin.py +139 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py +337 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/louvain.py +382 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/lukes.py +227 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/modularity_max.py +451 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/quality.py +346 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__init__.py +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py +136 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_centrality.py +84 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_divisive.py +106 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kclique.py +91 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py +91 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_label_propagation.py +241 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_louvain.py +264 -0
ckpts/universal/global_step80/zero/1.word_embeddings.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b8fc8e060da6651bab7e1102e5380f2358de5efce7080bda1b00f00354d4b48f
|
3 |
+
size 415237419
|
ckpts/universal/global_step80/zero/3.attention.dense.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:92a80bd64be5eba997980a2627b669500ea36f9a38fc189e5c421d2a203a13f6
|
3 |
+
size 16778396
|
ckpts/universal/global_step80/zero/3.attention.dense.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:14f728afccf4d8d783a46de041deaaeb16e1e9fb7185087383b44cfcbb434588
|
3 |
+
size 16778411
|
ckpts/universal/global_step80/zero/3.attention.dense.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8226d59673cf2a085adc0c0f2794196701f0755cc65202d85297fbae0f64663b
|
3 |
+
size 16778317
|
ckpts/universal/global_step80/zero/5.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4f1f02f7e113d1a38c9620bbff3cfc3cdef272f7b2e1a3157f266de6638f0ddc
|
3 |
+
size 33555612
|
ckpts/universal/global_step80/zero/5.mlp.dense_h_to_4h_swiglu.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ac371da278a2db15a650dac557983f0d2e12c67784d23f8320eeed6141716e5e
|
3 |
+
size 33555533
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__init__.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Functions for computing and measuring community structure.
|
2 |
+
|
3 |
+
The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the
|
4 |
+
functions as attributes of ``community``. For example::
|
5 |
+
|
6 |
+
>>> import networkx as nx
|
7 |
+
>>> G = nx.barbell_graph(5, 1)
|
8 |
+
>>> communities_generator = nx.community.girvan_newman(G)
|
9 |
+
>>> top_level_communities = next(communities_generator)
|
10 |
+
>>> next_level_communities = next(communities_generator)
|
11 |
+
>>> sorted(map(sorted, next_level_communities))
|
12 |
+
[[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]]
|
13 |
+
|
14 |
+
"""
|
15 |
+
from networkx.algorithms.community.asyn_fluid import *
|
16 |
+
from networkx.algorithms.community.centrality import *
|
17 |
+
from networkx.algorithms.community.divisive import *
|
18 |
+
from networkx.algorithms.community.kclique import *
|
19 |
+
from networkx.algorithms.community.kernighan_lin import *
|
20 |
+
from networkx.algorithms.community.label_propagation import *
|
21 |
+
from networkx.algorithms.community.lukes import *
|
22 |
+
from networkx.algorithms.community.modularity_max import *
|
23 |
+
from networkx.algorithms.community.quality import *
|
24 |
+
from networkx.algorithms.community.community_utils import *
|
25 |
+
from networkx.algorithms.community.louvain import *
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.36 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-310.pyc
ADDED
Binary file (4.35 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-310.pyc
ADDED
Binary file (6.31 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-310.pyc
ADDED
Binary file (1.3 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-310.pyc
ADDED
Binary file (5.62 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-310.pyc
ADDED
Binary file (2.42 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-310.pyc
ADDED
Binary file (5.44 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-310.pyc
ADDED
Binary file (10.5 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-310.pyc
ADDED
Binary file (13.8 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-310.pyc
ADDED
Binary file (7.27 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-310.pyc
ADDED
Binary file (13.3 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-310.pyc
ADDED
Binary file (11.4 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/asyn_fluid.py
ADDED
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Asynchronous Fluid Communities algorithm for community detection."""
|
2 |
+
|
3 |
+
from collections import Counter
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
from networkx.algorithms.components import is_connected
|
7 |
+
from networkx.exception import NetworkXError
|
8 |
+
from networkx.utils import groups, not_implemented_for, py_random_state
|
9 |
+
|
10 |
+
__all__ = ["asyn_fluidc"]
|
11 |
+
|
12 |
+
|
13 |
+
@not_implemented_for("directed")
|
14 |
+
@not_implemented_for("multigraph")
|
15 |
+
@py_random_state(3)
|
16 |
+
@nx._dispatchable
|
17 |
+
def asyn_fluidc(G, k, max_iter=100, seed=None):
|
18 |
+
"""Returns communities in `G` as detected by Fluid Communities algorithm.
|
19 |
+
|
20 |
+
The asynchronous fluid communities algorithm is described in
|
21 |
+
[1]_. The algorithm is based on the simple idea of fluids interacting
|
22 |
+
in an environment, expanding and pushing each other. Its initialization is
|
23 |
+
random, so found communities may vary on different executions.
|
24 |
+
|
25 |
+
The algorithm proceeds as follows. First each of the initial k communities
|
26 |
+
is initialized in a random vertex in the graph. Then the algorithm iterates
|
27 |
+
over all vertices in a random order, updating the community of each vertex
|
28 |
+
based on its own community and the communities of its neighbors. This
|
29 |
+
process is performed several times until convergence.
|
30 |
+
At all times, each community has a total density of 1, which is equally
|
31 |
+
distributed among the vertices it contains. If a vertex changes of
|
32 |
+
community, vertex densities of affected communities are adjusted
|
33 |
+
immediately. When a complete iteration over all vertices is done, such that
|
34 |
+
no vertex changes the community it belongs to, the algorithm has converged
|
35 |
+
and returns.
|
36 |
+
|
37 |
+
This is the original version of the algorithm described in [1]_.
|
38 |
+
Unfortunately, it does not support weighted graphs yet.
|
39 |
+
|
40 |
+
Parameters
|
41 |
+
----------
|
42 |
+
G : NetworkX graph
|
43 |
+
Graph must be simple and undirected.
|
44 |
+
|
45 |
+
k : integer
|
46 |
+
The number of communities to be found.
|
47 |
+
|
48 |
+
max_iter : integer
|
49 |
+
The number of maximum iterations allowed. By default 100.
|
50 |
+
|
51 |
+
seed : integer, random_state, or None (default)
|
52 |
+
Indicator of random number generation state.
|
53 |
+
See :ref:`Randomness<randomness>`.
|
54 |
+
|
55 |
+
Returns
|
56 |
+
-------
|
57 |
+
communities : iterable
|
58 |
+
Iterable of communities given as sets of nodes.
|
59 |
+
|
60 |
+
Notes
|
61 |
+
-----
|
62 |
+
k variable is not an optional argument.
|
63 |
+
|
64 |
+
References
|
65 |
+
----------
|
66 |
+
.. [1] Parés F., Garcia-Gasulla D. et al. "Fluid Communities: A
|
67 |
+
Competitive and Highly Scalable Community Detection Algorithm".
|
68 |
+
[https://arxiv.org/pdf/1703.09307.pdf].
|
69 |
+
"""
|
70 |
+
# Initial checks
|
71 |
+
if not isinstance(k, int):
|
72 |
+
raise NetworkXError("k must be an integer.")
|
73 |
+
if not k > 0:
|
74 |
+
raise NetworkXError("k must be greater than 0.")
|
75 |
+
if not is_connected(G):
|
76 |
+
raise NetworkXError("Fluid Communities require connected Graphs.")
|
77 |
+
if len(G) < k:
|
78 |
+
raise NetworkXError("k cannot be bigger than the number of nodes.")
|
79 |
+
# Initialization
|
80 |
+
max_density = 1.0
|
81 |
+
vertices = list(G)
|
82 |
+
seed.shuffle(vertices)
|
83 |
+
communities = {n: i for i, n in enumerate(vertices[:k])}
|
84 |
+
density = {}
|
85 |
+
com_to_numvertices = {}
|
86 |
+
for vertex in communities:
|
87 |
+
com_to_numvertices[communities[vertex]] = 1
|
88 |
+
density[communities[vertex]] = max_density
|
89 |
+
# Set up control variables and start iterating
|
90 |
+
iter_count = 0
|
91 |
+
cont = True
|
92 |
+
while cont:
|
93 |
+
cont = False
|
94 |
+
iter_count += 1
|
95 |
+
# Loop over all vertices in graph in a random order
|
96 |
+
vertices = list(G)
|
97 |
+
seed.shuffle(vertices)
|
98 |
+
for vertex in vertices:
|
99 |
+
# Updating rule
|
100 |
+
com_counter = Counter()
|
101 |
+
# Take into account self vertex community
|
102 |
+
try:
|
103 |
+
com_counter.update({communities[vertex]: density[communities[vertex]]})
|
104 |
+
except KeyError:
|
105 |
+
pass
|
106 |
+
# Gather neighbor vertex communities
|
107 |
+
for v in G[vertex]:
|
108 |
+
try:
|
109 |
+
com_counter.update({communities[v]: density[communities[v]]})
|
110 |
+
except KeyError:
|
111 |
+
continue
|
112 |
+
# Check which is the community with highest density
|
113 |
+
new_com = -1
|
114 |
+
if len(com_counter.keys()) > 0:
|
115 |
+
max_freq = max(com_counter.values())
|
116 |
+
best_communities = [
|
117 |
+
com
|
118 |
+
for com, freq in com_counter.items()
|
119 |
+
if (max_freq - freq) < 0.0001
|
120 |
+
]
|
121 |
+
# If actual vertex com in best communities, it is preserved
|
122 |
+
try:
|
123 |
+
if communities[vertex] in best_communities:
|
124 |
+
new_com = communities[vertex]
|
125 |
+
except KeyError:
|
126 |
+
pass
|
127 |
+
# If vertex community changes...
|
128 |
+
if new_com == -1:
|
129 |
+
# Set flag of non-convergence
|
130 |
+
cont = True
|
131 |
+
# Randomly chose a new community from candidates
|
132 |
+
new_com = seed.choice(best_communities)
|
133 |
+
# Update previous community status
|
134 |
+
try:
|
135 |
+
com_to_numvertices[communities[vertex]] -= 1
|
136 |
+
density[communities[vertex]] = (
|
137 |
+
max_density / com_to_numvertices[communities[vertex]]
|
138 |
+
)
|
139 |
+
except KeyError:
|
140 |
+
pass
|
141 |
+
# Update new community status
|
142 |
+
communities[vertex] = new_com
|
143 |
+
com_to_numvertices[communities[vertex]] += 1
|
144 |
+
density[communities[vertex]] = (
|
145 |
+
max_density / com_to_numvertices[communities[vertex]]
|
146 |
+
)
|
147 |
+
# If maximum iterations reached --> output actual results
|
148 |
+
if iter_count > max_iter:
|
149 |
+
break
|
150 |
+
# Return results by grouping communities as list of vertices
|
151 |
+
return iter(groups(communities).values())
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/centrality.py
ADDED
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Functions for computing communities based on centrality notions."""
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
__all__ = ["girvan_newman"]
|
6 |
+
|
7 |
+
|
8 |
+
@nx._dispatchable(preserve_edge_attrs="most_valuable_edge")
|
9 |
+
def girvan_newman(G, most_valuable_edge=None):
|
10 |
+
"""Finds communities in a graph using the Girvan–Newman method.
|
11 |
+
|
12 |
+
Parameters
|
13 |
+
----------
|
14 |
+
G : NetworkX graph
|
15 |
+
|
16 |
+
most_valuable_edge : function
|
17 |
+
Function that takes a graph as input and outputs an edge. The
|
18 |
+
edge returned by this function will be recomputed and removed at
|
19 |
+
each iteration of the algorithm.
|
20 |
+
|
21 |
+
If not specified, the edge with the highest
|
22 |
+
:func:`networkx.edge_betweenness_centrality` will be used.
|
23 |
+
|
24 |
+
Returns
|
25 |
+
-------
|
26 |
+
iterator
|
27 |
+
Iterator over tuples of sets of nodes in `G`. Each set of node
|
28 |
+
is a community, each tuple is a sequence of communities at a
|
29 |
+
particular level of the algorithm.
|
30 |
+
|
31 |
+
Examples
|
32 |
+
--------
|
33 |
+
To get the first pair of communities::
|
34 |
+
|
35 |
+
>>> G = nx.path_graph(10)
|
36 |
+
>>> comp = nx.community.girvan_newman(G)
|
37 |
+
>>> tuple(sorted(c) for c in next(comp))
|
38 |
+
([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])
|
39 |
+
|
40 |
+
To get only the first *k* tuples of communities, use
|
41 |
+
:func:`itertools.islice`::
|
42 |
+
|
43 |
+
>>> import itertools
|
44 |
+
>>> G = nx.path_graph(8)
|
45 |
+
>>> k = 2
|
46 |
+
>>> comp = nx.community.girvan_newman(G)
|
47 |
+
>>> for communities in itertools.islice(comp, k):
|
48 |
+
... print(tuple(sorted(c) for c in communities))
|
49 |
+
...
|
50 |
+
([0, 1, 2, 3], [4, 5, 6, 7])
|
51 |
+
([0, 1], [2, 3], [4, 5, 6, 7])
|
52 |
+
|
53 |
+
To stop getting tuples of communities once the number of communities
|
54 |
+
is greater than *k*, use :func:`itertools.takewhile`::
|
55 |
+
|
56 |
+
>>> import itertools
|
57 |
+
>>> G = nx.path_graph(8)
|
58 |
+
>>> k = 4
|
59 |
+
>>> comp = nx.community.girvan_newman(G)
|
60 |
+
>>> limited = itertools.takewhile(lambda c: len(c) <= k, comp)
|
61 |
+
>>> for communities in limited:
|
62 |
+
... print(tuple(sorted(c) for c in communities))
|
63 |
+
...
|
64 |
+
([0, 1, 2, 3], [4, 5, 6, 7])
|
65 |
+
([0, 1], [2, 3], [4, 5, 6, 7])
|
66 |
+
([0, 1], [2, 3], [4, 5], [6, 7])
|
67 |
+
|
68 |
+
To just choose an edge to remove based on the weight::
|
69 |
+
|
70 |
+
>>> from operator import itemgetter
|
71 |
+
>>> G = nx.path_graph(10)
|
72 |
+
>>> edges = G.edges()
|
73 |
+
>>> nx.set_edge_attributes(G, {(u, v): v for u, v in edges}, "weight")
|
74 |
+
>>> def heaviest(G):
|
75 |
+
... u, v, w = max(G.edges(data="weight"), key=itemgetter(2))
|
76 |
+
... return (u, v)
|
77 |
+
...
|
78 |
+
>>> comp = nx.community.girvan_newman(G, most_valuable_edge=heaviest)
|
79 |
+
>>> tuple(sorted(c) for c in next(comp))
|
80 |
+
([0, 1, 2, 3, 4, 5, 6, 7, 8], [9])
|
81 |
+
|
82 |
+
To utilize edge weights when choosing an edge with, for example, the
|
83 |
+
highest betweenness centrality::
|
84 |
+
|
85 |
+
>>> from networkx import edge_betweenness_centrality as betweenness
|
86 |
+
>>> def most_central_edge(G):
|
87 |
+
... centrality = betweenness(G, weight="weight")
|
88 |
+
... return max(centrality, key=centrality.get)
|
89 |
+
...
|
90 |
+
>>> G = nx.path_graph(10)
|
91 |
+
>>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge)
|
92 |
+
>>> tuple(sorted(c) for c in next(comp))
|
93 |
+
([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])
|
94 |
+
|
95 |
+
To specify a different ranking algorithm for edges, use the
|
96 |
+
`most_valuable_edge` keyword argument::
|
97 |
+
|
98 |
+
>>> from networkx import edge_betweenness_centrality
|
99 |
+
>>> from random import random
|
100 |
+
>>> def most_central_edge(G):
|
101 |
+
... centrality = edge_betweenness_centrality(G)
|
102 |
+
... max_cent = max(centrality.values())
|
103 |
+
... # Scale the centrality values so they are between 0 and 1,
|
104 |
+
... # and add some random noise.
|
105 |
+
... centrality = {e: c / max_cent for e, c in centrality.items()}
|
106 |
+
... # Add some random noise.
|
107 |
+
... centrality = {e: c + random() for e, c in centrality.items()}
|
108 |
+
... return max(centrality, key=centrality.get)
|
109 |
+
...
|
110 |
+
>>> G = nx.path_graph(10)
|
111 |
+
>>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge)
|
112 |
+
|
113 |
+
Notes
|
114 |
+
-----
|
115 |
+
The Girvan–Newman algorithm detects communities by progressively
|
116 |
+
removing edges from the original graph. The algorithm removes the
|
117 |
+
"most valuable" edge, traditionally the edge with the highest
|
118 |
+
betweenness centrality, at each step. As the graph breaks down into
|
119 |
+
pieces, the tightly knit community structure is exposed and the
|
120 |
+
result can be depicted as a dendrogram.
|
121 |
+
|
122 |
+
"""
|
123 |
+
# If the graph is already empty, simply return its connected
|
124 |
+
# components.
|
125 |
+
if G.number_of_edges() == 0:
|
126 |
+
yield tuple(nx.connected_components(G))
|
127 |
+
return
|
128 |
+
# If no function is provided for computing the most valuable edge,
|
129 |
+
# use the edge betweenness centrality.
|
130 |
+
if most_valuable_edge is None:
|
131 |
+
|
132 |
+
def most_valuable_edge(G):
|
133 |
+
"""Returns the edge with the highest betweenness centrality
|
134 |
+
in the graph `G`.
|
135 |
+
|
136 |
+
"""
|
137 |
+
# We have guaranteed that the graph is non-empty, so this
|
138 |
+
# dictionary will never be empty.
|
139 |
+
betweenness = nx.edge_betweenness_centrality(G)
|
140 |
+
return max(betweenness, key=betweenness.get)
|
141 |
+
|
142 |
+
# The copy of G here must include the edge weight data.
|
143 |
+
g = G.copy().to_undirected()
|
144 |
+
# Self-loops must be removed because their removal has no effect on
|
145 |
+
# the connected components of the graph.
|
146 |
+
g.remove_edges_from(nx.selfloop_edges(g))
|
147 |
+
while g.number_of_edges() > 0:
|
148 |
+
yield _without_most_central_edges(g, most_valuable_edge)
|
149 |
+
|
150 |
+
|
151 |
+
def _without_most_central_edges(G, most_valuable_edge):
|
152 |
+
"""Returns the connected components of the graph that results from
|
153 |
+
repeatedly removing the most "valuable" edge in the graph.
|
154 |
+
|
155 |
+
`G` must be a non-empty graph. This function modifies the graph `G`
|
156 |
+
in-place; that is, it removes edges on the graph `G`.
|
157 |
+
|
158 |
+
`most_valuable_edge` is a function that takes the graph `G` as input
|
159 |
+
(or a subgraph with one or more edges of `G` removed) and returns an
|
160 |
+
edge. That edge will be removed and this process will be repeated
|
161 |
+
until the number of connected components in the graph increases.
|
162 |
+
|
163 |
+
"""
|
164 |
+
original_num_components = nx.number_connected_components(G)
|
165 |
+
num_new_components = original_num_components
|
166 |
+
while num_new_components <= original_num_components:
|
167 |
+
edge = most_valuable_edge(G)
|
168 |
+
G.remove_edge(*edge)
|
169 |
+
new_components = tuple(nx.connected_components(G))
|
170 |
+
num_new_components = len(new_components)
|
171 |
+
return new_components
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/community_utils.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Helper functions for community-finding algorithms."""
|
2 |
+
import networkx as nx
|
3 |
+
|
4 |
+
__all__ = ["is_partition"]
|
5 |
+
|
6 |
+
|
7 |
+
@nx._dispatchable
|
8 |
+
def is_partition(G, communities):
|
9 |
+
"""Returns *True* if `communities` is a partition of the nodes of `G`.
|
10 |
+
|
11 |
+
A partition of a universe set is a family of pairwise disjoint sets
|
12 |
+
whose union is the entire universe set.
|
13 |
+
|
14 |
+
Parameters
|
15 |
+
----------
|
16 |
+
G : NetworkX graph.
|
17 |
+
|
18 |
+
communities : list or iterable of sets of nodes
|
19 |
+
If not a list, the iterable is converted internally to a list.
|
20 |
+
If it is an iterator it is exhausted.
|
21 |
+
|
22 |
+
"""
|
23 |
+
# Alternate implementation:
|
24 |
+
# return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G)
|
25 |
+
if not isinstance(communities, list):
|
26 |
+
communities = list(communities)
|
27 |
+
nodes = {n for c in communities for n in c if n in G}
|
28 |
+
|
29 |
+
return len(G) == len(nodes) == sum(len(c) for c in communities)
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/divisive.py
ADDED
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
__all__ = [
|
6 |
+
"edge_betweenness_partition",
|
7 |
+
"edge_current_flow_betweenness_partition",
|
8 |
+
]
|
9 |
+
|
10 |
+
|
11 |
+
@nx._dispatchable(edge_attrs="weight")
|
12 |
+
def edge_betweenness_partition(G, number_of_sets, *, weight=None):
|
13 |
+
"""Partition created by iteratively removing the highest edge betweenness edge.
|
14 |
+
|
15 |
+
This algorithm works by calculating the edge betweenness for all
|
16 |
+
edges and removing the edge with the highest value. It is then
|
17 |
+
determined whether the graph has been broken into at least
|
18 |
+
`number_of_sets` connected components.
|
19 |
+
If not the process is repeated.
|
20 |
+
|
21 |
+
Parameters
|
22 |
+
----------
|
23 |
+
G : NetworkX Graph, DiGraph or MultiGraph
|
24 |
+
Graph to be partitioned
|
25 |
+
|
26 |
+
number_of_sets : int
|
27 |
+
Number of sets in the desired partition of the graph
|
28 |
+
|
29 |
+
weight : key, optional, default=None
|
30 |
+
The key to use if using weights for edge betweenness calculation
|
31 |
+
|
32 |
+
Returns
|
33 |
+
-------
|
34 |
+
C : list of sets
|
35 |
+
Partition of the nodes of G
|
36 |
+
|
37 |
+
Raises
|
38 |
+
------
|
39 |
+
NetworkXError
|
40 |
+
If number_of_sets is <= 0 or if number_of_sets > len(G)
|
41 |
+
|
42 |
+
Examples
|
43 |
+
--------
|
44 |
+
>>> G = nx.karate_club_graph()
|
45 |
+
>>> part = nx.community.edge_betweenness_partition(G, 2)
|
46 |
+
>>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part
|
47 |
+
True
|
48 |
+
>>> {2, 8, 9, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part
|
49 |
+
True
|
50 |
+
|
51 |
+
See Also
|
52 |
+
--------
|
53 |
+
edge_current_flow_betweenness_partition
|
54 |
+
|
55 |
+
Notes
|
56 |
+
-----
|
57 |
+
This algorithm is fairly slow, as both the calculation of connected
|
58 |
+
components and edge betweenness relies on all pairs shortest
|
59 |
+
path algorithms. They could potentially be combined to cut down
|
60 |
+
on overall computation time.
|
61 |
+
|
62 |
+
References
|
63 |
+
----------
|
64 |
+
.. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
|
65 |
+
Volume 486, Issue 3-5 p. 75-174
|
66 |
+
http://arxiv.org/abs/0906.0612
|
67 |
+
"""
|
68 |
+
if number_of_sets <= 0:
|
69 |
+
raise nx.NetworkXError("number_of_sets must be >0")
|
70 |
+
if number_of_sets == 1:
|
71 |
+
return [set(G)]
|
72 |
+
if number_of_sets == len(G):
|
73 |
+
return [{n} for n in G]
|
74 |
+
if number_of_sets > len(G):
|
75 |
+
raise nx.NetworkXError("number_of_sets must be <= len(G)")
|
76 |
+
|
77 |
+
H = G.copy()
|
78 |
+
partition = list(nx.connected_components(H))
|
79 |
+
while len(partition) < number_of_sets:
|
80 |
+
ranking = nx.edge_betweenness_centrality(H, weight=weight)
|
81 |
+
edge = max(ranking, key=ranking.get)
|
82 |
+
H.remove_edge(*edge)
|
83 |
+
partition = list(nx.connected_components(H))
|
84 |
+
return partition
|
85 |
+
|
86 |
+
|
87 |
+
@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None):
    """Partition created by removing the highest edge current flow betweenness edge.

    This algorithm works by calculating the edge current flow
    betweenness for all edges and removing the edge with the
    highest value. It is then determined whether the graph has
    been broken into at least `number_of_sets` connected
    components. If not the process is repeated.

    Parameters
    ----------
    G : NetworkX Graph, DiGraph or MultiGraph
        Graph to be partitioned

    number_of_sets : int
        Number of sets in the desired partition of the graph

    weight : key, optional (default=None)
        The edge attribute key to use as weights for
        edge current flow betweenness calculations

    Returns
    -------
    C : list of sets
        Partition of G

    Raises
    ------
    NetworkXError
        If number_of_sets is <= 0 or number_of_sets > len(G)

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2)
    >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part
    True
    >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part
    True


    See Also
    --------
    edge_betweenness_partition

    Notes
    -----
    This algorithm is extremely slow, as the recalculation of the edge
    current flow betweenness is extremely slow.

    References
    ----------
    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
       Volume 486, Issue 3-5 p. 75-174
       http://arxiv.org/abs/0906.0612
    """
    # Trivial and invalid set counts are resolved before any computation.
    if number_of_sets <= 0:
        raise nx.NetworkXError("number_of_sets must be >0")
    elif number_of_sets == 1:
        return [set(G)]
    elif number_of_sets == len(G):
        return [{n} for n in G]
    elif number_of_sets > len(G):
        raise nx.NetworkXError("number_of_sets must be <= len(G)")

    # Unnormalized centrality, recomputed per connected component below.
    rank = functools.partial(
        nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight
    )

    # current flow requires a connected network so we track the components explicitly
    H = G.copy()
    partition = list(nx.connected_components(H))
    if len(partition) > 1:
        Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition]
    else:
        Hcc_subgraphs = [H]

    # ranking maps edge -> current-flow betweenness, merged over all components.
    ranking = {}
    for Hcc in Hcc_subgraphs:
        ranking.update(rank(Hcc))

    while len(partition) < number_of_sets:
        # Remove the globally highest-ranked edge from whichever component holds it.
        edge = max(ranking, key=ranking.get)
        for cc, Hcc in zip(partition, Hcc_subgraphs):
            if edge[0] in cc:
                Hcc.remove_edge(*edge)
                del ranking[edge]
                splitcc_list = list(nx.connected_components(Hcc))
                if len(splitcc_list) > 1:
                    # there are 2 connected components. split off smaller one
                    cc_new = min(splitcc_list, key=len)
                    Hcc_new = Hcc.subgraph(cc_new).copy()
                    # update edge rankings for Hcc_new
                    newranks = rank(Hcc_new)
                    for e, r in newranks.items():
                        # rank() may report an edge with its endpoints in the
                        # opposite order; e[::-1] writes back under the
                        # orientation already stored in ranking.
                        ranking[e if e in ranking else e[::-1]] = r
                    # append new cc and Hcc to their lists.
                    partition.append(cc_new)
                    Hcc_subgraphs.append(Hcc_new)

                    # leave existing cc and Hcc in their lists, but shrink them
                    Hcc.remove_nodes_from(cc_new)
                    cc.difference_update(cc_new)
                # update edge rankings for Hcc whether it was split or not
                newranks = rank(Hcc)
                for e, r in newranks.items():
                    ranking[e if e in ranking else e[::-1]] = r
                break
    return partition
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/kclique.py
ADDED
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from collections import defaultdict
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
__all__ = ["k_clique_communities"]
|
6 |
+
|
7 |
+
|
8 |
+
@nx._dispatchable
def k_clique_communities(G, k, cliques=None):
    """Find k-clique communities in graph using the percolation method.

    A k-clique community is the union of all cliques of size k that
    can be reached through adjacent (sharing k-1 nodes) k-cliques.

    Parameters
    ----------
    G : NetworkX graph

    k : int
        Size of smallest clique

    cliques: list or generator
        Precomputed cliques (use networkx.find_cliques(G))

    Returns
    -------
    Yields sets of nodes, one for each k-clique community.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2)
    >>> G.add_edges_from(K5.edges())
    >>> c = list(nx.community.k_clique_communities(G, 4))
    >>> sorted(list(c[0]))
    [0, 1, 2, 3, 4, 5, 6]
    >>> list(nx.community.k_clique_communities(G, 6))
    []

    References
    ----------
    .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek,
       Uncovering the overlapping community structure of complex networks
       in nature and society Nature 435, 814-818, 2005,
       doi:10.1038/nature03607
    """
    if k < 2:
        raise nx.NetworkXError(f"k={k}, k must be greater than 1.")
    if cliques is None:
        cliques = nx.find_cliques(G)
    # Only cliques of size >= k can participate; freeze them so they are
    # hashable and can serve as nodes of the percolation graph.
    cliques = [frozenset(c) for c in cliques if len(c) >= k]

    # Index every node to the list of retained cliques that contain it.
    node_cliques = defaultdict(list)
    for cq in cliques:
        for v in cq:
            node_cliques[v].append(cq)

    # Percolation graph: one node per clique, an edge whenever two cliques
    # share at least k - 1 nodes.
    perc_graph = nx.Graph()
    perc_graph.add_nodes_from(cliques)
    for cq in cliques:
        perc_graph.add_edges_from(
            (cq, other)
            for other in _get_adjacent_cliques(cq, node_cliques)
            if len(cq & other) >= k - 1
        )

    # Each connected component of the percolation graph is one community:
    # the union of all mutually reachable cliques.
    for comp in nx.connected_components(perc_graph):
        yield frozenset.union(*comp)
|
71 |
+
|
72 |
+
|
73 |
+
def _get_adjacent_cliques(clique, membership_dict):
|
74 |
+
adjacent_cliques = set()
|
75 |
+
for n in clique:
|
76 |
+
for adj_clique in membership_dict[n]:
|
77 |
+
if clique != adj_clique:
|
78 |
+
adjacent_cliques.add(adj_clique)
|
79 |
+
return adjacent_cliques
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/kernighan_lin.py
ADDED
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Functions for computing the Kernighan–Lin bipartition algorithm."""
|
2 |
+
|
3 |
+
from itertools import count
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
from networkx.algorithms.community.community_utils import is_partition
|
7 |
+
from networkx.utils import BinaryHeap, not_implemented_for, py_random_state
|
8 |
+
|
9 |
+
__all__ = ["kernighan_lin_bisection"]
|
10 |
+
|
11 |
+
|
12 |
+
def _kernighan_lin_sweep(edges, side):
    """
    This is a modified form of Kernighan-Lin, which moves single nodes at a
    time, alternating between sides to keep the bisection balanced. We keep
    two min-heaps of swap costs to make optimal-next-move selection fast.

    ``edges`` is an index-based adjacency list of ``(neighbor, weight)``
    pairs and ``side`` is a 0/1 list giving each node's current side.
    Yields ``(cumulative_cost, moves_so_far, (u, v))`` after each pair of
    opposite-side moves.
    """
    # One heap per side; indexing the ``costs`` pair by ``side_u`` selects
    # the heap for that node's side.
    costs0, costs1 = costs = BinaryHeap(), BinaryHeap()
    for u, side_u, edges_u in zip(count(), side, edges):
        # Neighbors on side 1 contribute +w, side 0 contribute -w; the sign
        # flip for side-0 nodes keeps both heaps ordered as min-heaps.
        cost_u = sum(w if side[v] else -w for v, w in edges_u)
        costs[side_u].insert(u, cost_u if side_u else -cost_u)

    def _update_costs(costs_x, x):
        # After node x is moved, adjust the cached cost of every neighbor
        # still queued; the edge (x, y) changed cut status, hence the 2*w.
        for y, w in edges[x]:
            costs_y = costs[side[y]]
            cost_y = costs_y.get(y)
            if cost_y is not None:
                cost_y += 2 * (-w if costs_x is costs_y else w)
                costs_y.insert(y, cost_y, True)

    i = 0
    totcost = 0
    # Alternate one move from each side so the bisection stays balanced
    # after every yielded step; stop when either side is exhausted.
    while costs0 and costs1:
        u, cost_u = costs0.pop()
        _update_costs(costs0, u)
        v, cost_v = costs1.pop()
        _update_costs(costs1, v)
        totcost += cost_u + cost_v
        i += 1
        yield totcost, i, (u, v)
|
41 |
+
|
42 |
+
|
43 |
+
@not_implemented_for("directed")
@py_random_state(4)
@nx._dispatchable(edge_attrs="weight")
def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None):
    """Partition a graph into two blocks using the Kernighan–Lin
    algorithm.

    This algorithm partitions a network into two sets by iteratively
    swapping pairs of nodes to reduce the edge cut between the two sets. The
    pairs are chosen according to a modified form of Kernighan-Lin [1]_, which
    moves node individually, alternating between sides to keep the bisection
    balanced.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be undirected.

    partition : tuple
        Pair of iterables containing an initial partition. If not
        specified, a random balanced partition is used.

    max_iter : int
        Maximum number of times to attempt swaps to find an
        improvement before giving up.

    weight : key
        Edge data key to use as weight. If None, the weights are all
        set to one.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
        Only used if partition is None

    Returns
    -------
    partition : tuple
        A pair of sets of nodes representing the bipartition.

    Raises
    ------
    NetworkXError
        If partition is not a valid partition of the nodes of the graph.

    References
    ----------
    .. [1] Kernighan, B. W.; Lin, Shen (1970).
       "An efficient heuristic procedure for partitioning graphs."
       *Bell Systems Technical Journal* 49: 291--307.
       Oxford University Press 2011.

    """
    n = len(G)
    labels = list(G)
    seed.shuffle(labels)
    # All work below is on integer indices; ``index`` maps a node to its
    # position in the shuffled ``labels`` order.
    index = {v: i for i, v in enumerate(labels)}

    if partition is None:
        # Balanced split; membership is random because labels were shuffled.
        side = [0] * (n // 2) + [1] * ((n + 1) // 2)
    else:
        try:
            A, B = partition
        except (TypeError, ValueError) as err:
            raise nx.NetworkXError("partition must be two sets") from err
        if not is_partition(G, (A, B)):
            raise nx.NetworkXError("partition invalid")
        side = [0] * n
        for a in A:
            side[index[a]] = 1

    # Index-based adjacency lists of (neighbor_index, weight); parallel
    # edges of a multigraph are collapsed by summing their weights.
    if G.is_multigraph():
        edges = [
            [
                (index[u], sum(e.get(weight, 1) for e in d.values()))
                for u, d in G[v].items()
            ]
            for v in labels
        ]
    else:
        edges = [
            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
        ]

    for i in range(max_iter):
        costs = list(_kernighan_lin_sweep(edges, side))
        # Best prefix of this sweep: lowest cumulative cost over its moves.
        min_cost, min_i, _ = min(costs)
        if min_cost >= 0:
            # No improving prefix exists; the bisection is locally optimal.
            break

        # Commit only the first ``min_i`` moves of the sweep.
        for _, _, (u, v) in costs[:min_i]:
            side[u] = 1
            side[v] = 0

    A = {u for u, s in zip(labels, side) if s == 0}
    B = {u for u, s in zip(labels, side) if s == 1}
    return A, B
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py
ADDED
@@ -0,0 +1,337 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Label propagation community detection algorithms.
|
3 |
+
"""
|
4 |
+
from collections import Counter, defaultdict, deque
|
5 |
+
|
6 |
+
import networkx as nx
|
7 |
+
from networkx.utils import groups, not_implemented_for, py_random_state
|
8 |
+
|
9 |
+
__all__ = [
|
10 |
+
"label_propagation_communities",
|
11 |
+
"asyn_lpa_communities",
|
12 |
+
"fast_label_propagation_communities",
|
13 |
+
]
|
14 |
+
|
15 |
+
|
16 |
+
@py_random_state("seed")
@nx._dispatchable(edge_attrs="weight")
def fast_label_propagation_communities(G, *, weight=None, seed=None):
    """Returns communities in `G` as detected by fast label propagation.

    The fast label propagation algorithm is described in [1]_. The algorithm is
    probabilistic and the found communities may vary in different executions.

    The algorithm operates as follows. First, the community label of each node is
    set to a unique label. The algorithm then repeatedly updates the labels of
    the nodes to the most frequent label in their neighborhood. In case of ties,
    a random label is chosen from the most frequent labels.

    The algorithm maintains a queue of nodes that still need to be processed.
    Initially, all nodes are added to the queue in a random order. Then the nodes
    are removed from the queue one by one and processed. If a node updates its label,
    all its neighbors that have a different label are added to the queue (if not
    already in the queue). The algorithm stops when the queue is empty.

    Parameters
    ----------
    G : Graph, DiGraph, MultiGraph, or MultiDiGraph
        Any NetworkX graph.

    weight : string, or None (default)
        The edge attribute representing a non-negative weight of an edge. If None,
        each edge is assumed to have weight one. The weight of an edge is used in
        determining the frequency with which a label appears among the neighbors of
        a node (edge with weight `w` is equivalent to `w` unweighted edges).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state. See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge directions are ignored for directed graphs.
    Edge weights must be non-negative numbers.

    References
    ----------
    .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by
       fast label propagation." Scientific Reports 13 (2023): 2701.
       https://doi.org/10.1038/s41598-023-29610-z
    """

    # Queue of nodes to be processed, in random initial order.
    nodes_queue = deque(G)
    seed.shuffle(nodes_queue)

    # Set of nodes in the queue; mirrors nodes_queue for O(1) membership tests.
    nodes_set = set(G)

    # Assign unique label to each node.
    comms = {node: i for i, node in enumerate(G)}

    while nodes_queue:
        # Remove next node from the queue to process.
        node = nodes_queue.popleft()
        nodes_set.remove(node)

        # Isolated nodes retain their initial label.
        if G.degree(node) > 0:
            # Compute frequency of labels in node's neighborhood.
            label_freqs = _fast_label_count(G, comms, node, weight)
            max_freq = max(label_freqs.values())

            # Always sample new label from most frequent labels.
            comm = seed.choice(
                [comm for comm in label_freqs if label_freqs[comm] == max_freq]
            )

            if comms[node] != comm:
                comms[node] = comm

                # Add neighbors that have different label to the queue.
                for nbr in nx.all_neighbors(G, node):
                    if comms[nbr] != comm and nbr not in nodes_set:
                        nodes_queue.append(nbr)
                        nodes_set.add(nbr)

    # Group nodes by their final label; each group is one community.
    yield from groups(comms).values()
|
102 |
+
|
103 |
+
|
104 |
+
def _fast_label_count(G, comms, node, weight=None):
    """Computes the frequency of labels in the neighborhood of a node.

    ``comms`` maps each node to its current community label. For directed
    graphs both in- and out-neighbors contribute (edge directions are
    ignored, per the caller's contract).

    Returns a dictionary keyed by label to the frequency of that label.
    """

    if weight is None:
        # Unweighted (un)directed simple graph.
        if not G.is_multigraph():
            label_freqs = Counter(map(comms.get, nx.all_neighbors(G, node)))

        # Unweighted (un)directed multigraph.
        else:
            label_freqs = defaultdict(int)
            for nbr in G[node]:
                # Each parallel edge to nbr counts separately.
                label_freqs[comms[nbr]] += len(G[node][nbr])

            if G.is_directed():
                # Add contributions from incoming parallel edges as well.
                for nbr in G.pred[node]:
                    label_freqs[comms[nbr]] += len(G.pred[node][nbr])

    else:
        # Weighted undirected simple/multigraph: missing weight attrs count as 1.
        label_freqs = defaultdict(float)
        for _, nbr, w in G.edges(node, data=weight, default=1):
            label_freqs[comms[nbr]] += w

        # Weighted directed simple/multigraph.
        if G.is_directed():
            for nbr, _, w in G.in_edges(node, data=weight, default=1):
                label_freqs[comms[nbr]] += w

    return label_freqs
|
137 |
+
|
138 |
+
|
139 |
+
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight")
def asyn_lpa_communities(G, weight=None, seed=None):
    """Returns communities in `G` as detected by asynchronous label
    propagation.

    The asynchronous label propagation algorithm is described in
    [1]_. The algorithm is probabilistic and the found communities may
    vary on different executions.

    The algorithm proceeds as follows. After initializing each node with
    a unique label, the algorithm repeatedly sets the label of a node to
    be the label that appears most frequently among that nodes
    neighbors. The algorithm halts when each node has the label that
    appears most frequently among its neighbors. The algorithm is
    asynchronous because each node is updated without waiting for
    updates on the remaining nodes.

    This generalized version of the algorithm in [1]_ accepts edge
    weights.

    Parameters
    ----------
    G : Graph

    weight : string
        The edge attribute representing the weight of an edge.
        If None, each edge is assumed to have weight one. In this
        algorithm, the weight of an edge is used in determining the
        frequency with which a label appears among the neighbors of a
        node: a higher weight means the label appears more often.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge weight attributes must be numerical.

    References
    ----------
    .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near
       linear time algorithm to detect community structures in large-scale
       networks." Physical Review E 76.3 (2007): 036106.
    """

    # Every node starts with its own unique label.
    labels = {n: i for i, n in enumerate(G)}
    cont = True  # True while at least one label changed in the last pass

    while cont:
        cont = False
        nodes = list(G)
        seed.shuffle(nodes)

        for node in nodes:
            if not G[node]:
                # Isolated nodes keep their unique initial label.
                continue

            # Get label frequencies among adjacent nodes.
            # Depending on the order they are processed in,
            # some nodes will be in iteration t and others in t-1,
            # making the algorithm asynchronous.
            if weight is None:
                # initialising a Counter from an iterator of labels is
                # faster for getting unweighted label frequencies
                label_freq = Counter(map(labels.get, G[node]))
            else:
                # updating a defaultdict is substantially faster
                # for getting weighted label frequencies
                label_freq = defaultdict(float)
                for _, v, wt in G.edges(node, data=weight, default=1):
                    label_freq[labels[v]] += wt

            # Get the labels that appear with maximum frequency.
            max_freq = max(label_freq.values())
            best_labels = [
                label for label, freq in label_freq.items() if freq == max_freq
            ]

            # If the node does not have one of the maximum frequency labels,
            # randomly choose one of them and update the node's label.
            # Continue the iteration as long as at least one node
            # doesn't have a maximum frequency label.
            if labels[node] not in best_labels:
                labels[node] = seed.choice(best_labels)
                cont = True

    # Group nodes by final label; each group is one community.
    yield from groups(labels).values()
|
233 |
+
|
234 |
+
|
235 |
+
@not_implemented_for("directed")
@nx._dispatchable
def label_propagation_communities(G):
    """Generates community sets determined by label propagation

    Finds communities in `G` using a semi-synchronous label propagation
    method [1]_. This method combines the advantages of both the synchronous
    and asynchronous models. Not implemented for directed graphs.

    Parameters
    ----------
    G : graph
        An undirected NetworkX graph.

    Returns
    -------
    communities : iterable
        A dict_values object that contains a set of nodes for each community.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed

    References
    ----------
    .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection
       via semi-synchronous label propagation algorithms. In Business
       Applications of Social Network Analysis (BASNA), 2010 IEEE International
       Workshop on (pp. 1-8). IEEE.
    """
    color_classes = _color_network(G)
    # Give every node its own starting label.
    node_label = {node: idx for idx, node in enumerate(G)}
    # Sweep one color class at a time until every node already carries a
    # most-frequent neighborhood label. Nodes sharing a color have no edges
    # between them, so updating a whole class together is safe
    # (the semi-synchronous scheme of [1]).
    while not _labeling_complete(node_label, G):
        for nodes_of_color in color_classes.values():
            for node in nodes_of_color:
                _update_label(node, node_label, G)

    # Group nodes by their final label.
    communities = defaultdict(set)
    for node, label in node_label.items():
        communities[label].add(node)
    return communities.values()
|
279 |
+
|
280 |
+
|
281 |
+
def _color_network(G):
    """Colors the network so that neighboring nodes all have distinct colors.

    Returns a dict keyed by color to a set of nodes with that color.
    """
    coloring = {}
    # Invert the node -> color map produced by the greedy coloring into
    # color -> set-of-nodes.
    for node, color in nx.coloring.greedy_color(G).items():
        coloring.setdefault(color, set()).add(node)
    return coloring
|
294 |
+
|
295 |
+
|
296 |
+
def _labeling_complete(labeling, G):
    """Determines whether or not LPA is done.

    Label propagation is complete when every node's label is among the
    highest-frequency labels of its neighborhood. Nodes with no neighbors
    are considered complete.
    """
    for v in G:
        if not G[v]:
            # No neighbors: nothing to agree with.
            continue
        if labeling[v] not in _most_frequent_labels(v, labeling, G):
            return False
    return True
|
307 |
+
|
308 |
+
|
309 |
+
def _most_frequent_labels(node, labeling, G):
|
310 |
+
"""Returns a set of all labels with maximum frequency in `labeling`.
|
311 |
+
|
312 |
+
Input `labeling` should be a dict keyed by node to labels.
|
313 |
+
"""
|
314 |
+
if not G[node]:
|
315 |
+
# Nodes with no neighbors are themselves a community and are labeled
|
316 |
+
# accordingly, hence the immediate if statement.
|
317 |
+
return {labeling[node]}
|
318 |
+
|
319 |
+
# Compute the frequencies of all neighbors of node
|
320 |
+
freqs = Counter(labeling[q] for q in G[node])
|
321 |
+
max_freq = max(freqs.values())
|
322 |
+
return {label for label, freq in freqs.items() if freq == max_freq}
|
323 |
+
|
324 |
+
|
325 |
+
def _update_label(node, labeling, G):
    """Set `node`'s label using the Prec-Max tie-breaking rule.

    Described in 'Community Detection via Semi-Synchronous Label
    Propagation Algorithms', Cordasco and Gargano, 2011.
    """
    best = _most_frequent_labels(node, labeling, G)
    if len(best) == 1:
        (labeling[node],) = best
    elif len(best) > 1 and labeling[node] not in best:
        # Prec-Max: on a tie, keep the current label if it is among the
        # winners, otherwise adopt the largest winning label.
        labeling[node] = max(best)
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/louvain.py
ADDED
@@ -0,0 +1,382 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Function for detecting communities based on Louvain Community Detection
|
2 |
+
Algorithm"""
|
3 |
+
|
4 |
+
import itertools
|
5 |
+
from collections import defaultdict, deque
|
6 |
+
|
7 |
+
import networkx as nx
|
8 |
+
from networkx.algorithms.community import modularity
|
9 |
+
from networkx.utils import py_random_state
|
10 |
+
|
11 |
+
__all__ = ["louvain_communities", "louvain_partitions"]
|
12 |
+
|
13 |
+
|
14 |
+
@py_random_state("seed")
|
15 |
+
@nx._dispatchable(edge_attrs="weight")
|
16 |
+
def louvain_communities(
|
17 |
+
G, weight="weight", resolution=1, threshold=0.0000001, max_level=None, seed=None
|
18 |
+
):
|
19 |
+
r"""Find the best partition of a graph using the Louvain Community Detection
|
20 |
+
Algorithm.
|
21 |
+
|
22 |
+
Louvain Community Detection Algorithm is a simple method to extract the community
|
23 |
+
structure of a network. This is a heuristic method based on modularity optimization. [1]_
|
24 |
+
|
25 |
+
The algorithm works in 2 steps. On the first step it assigns every node to be
|
26 |
+
in its own community and then for each node it tries to find the maximum positive
|
27 |
+
modularity gain by moving each node to all of its neighbor communities. If no positive
|
28 |
+
gain is achieved the node remains in its original community.
|
29 |
+
|
30 |
+
The modularity gain obtained by moving an isolated node $i$ into a community $C$ can
|
31 |
+
easily be calculated by the following formula (combining [1]_ [2]_ and some algebra):
|
32 |
+
|
33 |
+
.. math::
|
34 |
+
\Delta Q = \frac{k_{i,in}}{2m} - \gamma\frac{ \Sigma_{tot} \cdot k_i}{2m^2}
|
35 |
+
|
36 |
+
where $m$ is the size of the graph, $k_{i,in}$ is the sum of the weights of the links
|
37 |
+
from $i$ to nodes in $C$, $k_i$ is the sum of the weights of the links incident to node $i$,
|
38 |
+
$\Sigma_{tot}$ is the sum of the weights of the links incident to nodes in $C$ and $\gamma$
|
39 |
+
is the resolution parameter.
|
40 |
+
|
41 |
+
For the directed case the modularity gain can be computed using this formula according to [3]_
|
42 |
+
|
43 |
+
.. math::
|
44 |
+
\Delta Q = \frac{k_{i,in}}{m}
|
45 |
+
- \gamma\frac{k_i^{out} \cdot\Sigma_{tot}^{in} + k_i^{in} \cdot \Sigma_{tot}^{out}}{m^2}
|
46 |
+
|
47 |
+
where $k_i^{out}$, $k_i^{in}$ are the outer and inner weighted degrees of node $i$ and
|
48 |
+
$\Sigma_{tot}^{in}$, $\Sigma_{tot}^{out}$ are the sum of in-going and out-going links incident
|
49 |
+
to nodes in $C$.
|
50 |
+
|
51 |
+
The first phase continues until no individual move can improve the modularity.
|
52 |
+
|
53 |
+
The second phase consists in building a new network whose nodes are now the communities
|
54 |
+
found in the first phase. To do so, the weights of the links between the new nodes are given by
|
55 |
+
the sum of the weight of the links between nodes in the corresponding two communities. Once this
|
56 |
+
phase is complete it is possible to reapply the first phase creating bigger communities with
|
57 |
+
increased modularity.
|
58 |
+
|
59 |
+
The above two phases are executed until no modularity gain is achieved (or is less than
|
60 |
+
the `threshold`, or until `max_levels` is reached).
|
61 |
+
|
62 |
+
Be careful with self-loops in the input graph. These are treated as
|
63 |
+
previously reduced communities -- as if the process had been started
|
64 |
+
in the middle of the algorithm. Large self-loop edge weights thus
|
65 |
+
represent strong communities and in practice may be hard to add
|
66 |
+
other nodes to. If your input graph edge weights for self-loops
|
67 |
+
do not represent already reduced communities you may want to remove
|
68 |
+
the self-loops before inputting that graph.
|
69 |
+
|
70 |
+
Parameters
|
71 |
+
----------
|
72 |
+
G : NetworkX graph
|
73 |
+
weight : string or None, optional (default="weight")
|
74 |
+
The name of an edge attribute that holds the numerical value
|
75 |
+
used as a weight. If None then each edge has weight 1.
|
76 |
+
resolution : float, optional (default=1)
|
77 |
+
If resolution is less than 1, the algorithm favors larger communities.
|
78 |
+
Greater than 1 favors smaller communities
|
79 |
+
threshold : float, optional (default=0.0000001)
|
80 |
+
Modularity gain threshold for each level. If the gain of modularity
|
81 |
+
between 2 levels of the algorithm is less than the given threshold
|
82 |
+
then the algorithm stops and returns the resulting communities.
|
83 |
+
max_level : int or None, optional (default=None)
|
84 |
+
The maximum number of levels (steps of the algorithm) to compute.
|
85 |
+
Must be a positive integer or None. If None, then there is no max
|
86 |
+
level and the threshold parameter determines the stopping condition.
|
87 |
+
seed : integer, random_state, or None (default)
|
88 |
+
Indicator of random number generation state.
|
89 |
+
See :ref:`Randomness<randomness>`.
|
90 |
+
|
91 |
+
Returns
|
92 |
+
-------
|
93 |
+
list
|
94 |
+
A list of sets (partition of `G`). Each set represents one community and contains
|
95 |
+
all the nodes that constitute it.
|
96 |
+
|
97 |
+
Examples
|
98 |
+
--------
|
99 |
+
>>> import networkx as nx
|
100 |
+
>>> G = nx.petersen_graph()
|
101 |
+
>>> nx.community.louvain_communities(G, seed=123)
|
102 |
+
[{0, 4, 5, 7, 9}, {1, 2, 3, 6, 8}]
|
103 |
+
|
104 |
+
Notes
|
105 |
+
-----
|
106 |
+
The order in which the nodes are considered can affect the final output. In the algorithm
|
107 |
+
the ordering happens using a random shuffle.
|
108 |
+
|
109 |
+
References
|
110 |
+
----------
|
111 |
+
.. [1] Blondel, V.D. et al. Fast unfolding of communities in
|
112 |
+
large networks. J. Stat. Mech 10008, 1-12(2008). https://doi.org/10.1088/1742-5468/2008/10/P10008
|
113 |
+
.. [2] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
|
114 |
+
well-connected communities. Sci Rep 9, 5233 (2019). https://doi.org/10.1038/s41598-019-41695-z
|
115 |
+
.. [3] Nicolas Dugué, Anthony Perez. Directed Louvain : maximizing modularity in directed networks.
|
116 |
+
[Research Report] Université d’Orléans. 2015. hal-01231784. https://hal.archives-ouvertes.fr/hal-01231784
|
117 |
+
|
118 |
+
See Also
|
119 |
+
--------
|
120 |
+
louvain_partitions
|
121 |
+
"""
|
122 |
+
|
123 |
+
partitions = louvain_partitions(G, weight, resolution, threshold, seed)
|
124 |
+
if max_level is not None:
|
125 |
+
if max_level <= 0:
|
126 |
+
raise ValueError("max_level argument must be a positive integer or None")
|
127 |
+
partitions = itertools.islice(partitions, max_level)
|
128 |
+
final_partition = deque(partitions, maxlen=1)
|
129 |
+
return final_partition.pop()
|
130 |
+
|
131 |
+
|
132 |
+
@py_random_state("seed")
|
133 |
+
@nx._dispatchable(edge_attrs="weight")
|
134 |
+
def louvain_partitions(
|
135 |
+
G, weight="weight", resolution=1, threshold=0.0000001, seed=None
|
136 |
+
):
|
137 |
+
"""Yields partitions for each level of the Louvain Community Detection Algorithm
|
138 |
+
|
139 |
+
Louvain Community Detection Algorithm is a simple method to extract the community
|
140 |
+
structure of a network. This is a heuristic method based on modularity optimization. [1]_
|
141 |
+
|
142 |
+
The partitions at each level (step of the algorithm) form a dendrogram of communities.
|
143 |
+
A dendrogram is a diagram representing a tree and each level represents
|
144 |
+
a partition of the G graph. The top level contains the smallest communities
|
145 |
+
and as you traverse to the bottom of the tree the communities get bigger
|
146 |
+
and the overall modularity increases making the partition better.
|
147 |
+
|
148 |
+
Each level is generated by executing the two phases of the Louvain Community
|
149 |
+
Detection Algorithm.
|
150 |
+
|
151 |
+
Be careful with self-loops in the input graph. These are treated as
|
152 |
+
previously reduced communities -- as if the process had been started
|
153 |
+
in the middle of the algorithm. Large self-loop edge weights thus
|
154 |
+
represent strong communities and in practice may be hard to add
|
155 |
+
other nodes to. If your input graph edge weights for self-loops
|
156 |
+
do not represent already reduced communities you may want to remove
|
157 |
+
the self-loops before inputting that graph.
|
158 |
+
|
159 |
+
Parameters
|
160 |
+
----------
|
161 |
+
G : NetworkX graph
|
162 |
+
weight : string or None, optional (default="weight")
|
163 |
+
The name of an edge attribute that holds the numerical value
|
164 |
+
used as a weight. If None then each edge has weight 1.
|
165 |
+
resolution : float, optional (default=1)
|
166 |
+
If resolution is less than 1, the algorithm favors larger communities.
|
167 |
+
Greater than 1 favors smaller communities
|
168 |
+
threshold : float, optional (default=0.0000001)
|
169 |
+
Modularity gain threshold for each level. If the gain of modularity
|
170 |
+
between 2 levels of the algorithm is less than the given threshold
|
171 |
+
then the algorithm stops and returns the resulting communities.
|
172 |
+
seed : integer, random_state, or None (default)
|
173 |
+
Indicator of random number generation state.
|
174 |
+
See :ref:`Randomness<randomness>`.
|
175 |
+
|
176 |
+
Yields
|
177 |
+
------
|
178 |
+
list
|
179 |
+
A list of sets (partition of `G`). Each set represents one community and contains
|
180 |
+
all the nodes that constitute it.
|
181 |
+
|
182 |
+
References
|
183 |
+
----------
|
184 |
+
.. [1] Blondel, V.D. et al. Fast unfolding of communities in
|
185 |
+
large networks. J. Stat. Mech 10008, 1-12(2008)
|
186 |
+
|
187 |
+
See Also
|
188 |
+
--------
|
189 |
+
louvain_communities
|
190 |
+
"""
|
191 |
+
|
192 |
+
partition = [{u} for u in G.nodes()]
|
193 |
+
if nx.is_empty(G):
|
194 |
+
yield partition
|
195 |
+
return
|
196 |
+
mod = modularity(G, partition, resolution=resolution, weight=weight)
|
197 |
+
is_directed = G.is_directed()
|
198 |
+
if G.is_multigraph():
|
199 |
+
graph = _convert_multigraph(G, weight, is_directed)
|
200 |
+
else:
|
201 |
+
graph = G.__class__()
|
202 |
+
graph.add_nodes_from(G)
|
203 |
+
graph.add_weighted_edges_from(G.edges(data=weight, default=1))
|
204 |
+
|
205 |
+
m = graph.size(weight="weight")
|
206 |
+
partition, inner_partition, improvement = _one_level(
|
207 |
+
graph, m, partition, resolution, is_directed, seed
|
208 |
+
)
|
209 |
+
improvement = True
|
210 |
+
while improvement:
|
211 |
+
# gh-5901 protect the sets in the yielded list from further manipulation here
|
212 |
+
yield [s.copy() for s in partition]
|
213 |
+
new_mod = modularity(
|
214 |
+
graph, inner_partition, resolution=resolution, weight="weight"
|
215 |
+
)
|
216 |
+
if new_mod - mod <= threshold:
|
217 |
+
return
|
218 |
+
mod = new_mod
|
219 |
+
graph = _gen_graph(graph, inner_partition)
|
220 |
+
partition, inner_partition, improvement = _one_level(
|
221 |
+
graph, m, partition, resolution, is_directed, seed
|
222 |
+
)
|
223 |
+
|
224 |
+
|
225 |
+
def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None):
    """Run one level (the local-move phase) of the Louvain algorithm.

    Sweeps the nodes in random order, moving each to the neighboring
    community with the largest positive modularity gain; sweeps repeat
    until a full pass produces no move.

    Parameters
    ----------
    G : NetworkX Graph/DiGraph
        The (possibly already aggregated) graph to partition.
    m : number
        The size of the graph `G`.
    partition : list of sets of nodes
        A valid partition of the graph `G`; updated in place.
    resolution : positive number
        The resolution parameter for computing the modularity of a partition.
    is_directed : bool
        True if `G` is a directed graph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    tuple
        ``(partition, inner_partition, improvement)`` where `partition`
        groups the original nodes, `inner_partition` groups the current
        (aggregated) nodes, and `improvement` is True when at least one
        node changed community.
    """
    node_comm = {node: idx for idx, node in enumerate(G.nodes())}
    inner_partition = [{node} for node in G.nodes()]
    if is_directed:
        in_degrees = dict(G.in_degree(weight="weight"))
        out_degrees = dict(G.out_degree(weight="weight"))
        Stot_in = list(in_degrees.values())
        Stot_out = list(out_degrees.values())
        # Neighbor weights in both directions, ignoring self-loops.
        nbrs = {}
        for node in G:
            nbrs[node] = defaultdict(float)
            for _, other, wt in G.out_edges(node, data="weight"):
                if node != other:
                    nbrs[node][other] += wt
            for other, _, wt in G.in_edges(node, data="weight"):
                if node != other:
                    nbrs[node][other] += wt
    else:
        degrees = dict(G.degree(weight="weight"))
        Stot = list(degrees.values())
        nbrs = {
            node: {nbr: data["weight"] for nbr, data in G[node].items() if nbr != node}
            for node in G
        }
    visit_order = list(G.nodes)
    seed.shuffle(visit_order)
    moves = 1
    improvement = False
    while moves > 0:
        moves = 0
        for node in visit_order:
            best_gain = 0
            best_comm = node_comm[node]
            comm_weights = _neighbor_weights(nbrs[node], node_comm)
            if is_directed:
                in_degree = in_degrees[node]
                out_degree = out_degrees[node]
                # Temporarily take the node out of its own community.
                Stot_in[best_comm] -= in_degree
                Stot_out[best_comm] -= out_degree
                remove_cost = (
                    -comm_weights[best_comm] / m
                    + resolution
                    * (
                        out_degree * Stot_in[best_comm]
                        + in_degree * Stot_out[best_comm]
                    )
                    / m**2
                )
            else:
                degree = degrees[node]
                Stot[best_comm] -= degree
                remove_cost = -comm_weights[best_comm] / m + resolution * (
                    Stot[best_comm] * degree
                ) / (2 * m**2)
            for comm, wt in comm_weights.items():
                if is_directed:
                    gain = (
                        remove_cost
                        + wt / m
                        - resolution
                        * (out_degree * Stot_in[comm] + in_degree * Stot_out[comm])
                        / m**2
                    )
                else:
                    gain = (
                        remove_cost
                        + wt / m
                        - resolution * (Stot[comm] * degree) / (2 * m**2)
                    )
                if gain > best_gain:
                    best_gain = gain
                    best_comm = comm
            # Re-insert the node into the chosen (possibly unchanged) community.
            if is_directed:
                Stot_in[best_comm] += in_degree
                Stot_out[best_comm] += out_degree
            else:
                Stot[best_comm] += degree
            if best_comm != node_comm[node]:
                # "nodes" holds the original nodes an aggregated node stands for.
                members = G.nodes[node].get("nodes", {node})
                partition[node_comm[node]].difference_update(members)
                inner_partition[node_comm[node]].remove(node)
                partition[best_comm].update(members)
                inner_partition[best_comm].add(node)
                improvement = True
                moves += 1
                node_comm[node] = best_comm
    partition = list(filter(len, partition))
    inner_partition = list(filter(len, inner_partition))
    return partition, inner_partition, improvement
|
331 |
+
|
332 |
+
|
333 |
+
def _neighbor_weights(nbrs, node2com):
|
334 |
+
"""Calculate weights between node and its neighbor communities.
|
335 |
+
|
336 |
+
Parameters
|
337 |
+
----------
|
338 |
+
nbrs : dictionary
|
339 |
+
Dictionary with nodes' neighbors as keys and their edge weight as value.
|
340 |
+
node2com : dictionary
|
341 |
+
Dictionary with all graph's nodes as keys and their community index as value.
|
342 |
+
|
343 |
+
"""
|
344 |
+
weights = defaultdict(float)
|
345 |
+
for nbr, wt in nbrs.items():
|
346 |
+
weights[node2com[nbr]] += wt
|
347 |
+
return weights
|
348 |
+
|
349 |
+
|
350 |
+
def _gen_graph(G, partition):
    """Build the next-level graph whose nodes are the communities of `partition`."""
    H = G.__class__()
    node2com = {}
    for idx, community in enumerate(partition):
        members = set()
        for node in community:
            node2com[node] = idx
            # A node may itself stand for an already-merged set of original nodes.
            members.update(G.nodes[node].get("nodes", {node}))
        H.add_node(idx, nodes=members)

    # Sum the weights of all edges joining each pair of communities.
    for u, v, data in G.edges(data=True):
        c1, c2 = node2com[u], node2com[v]
        prior = H.get_edge_data(c1, c2, {"weight": 0})["weight"]
        H.add_edge(c1, c2, weight=data["weight"] + prior)
    return H
|
368 |
+
|
369 |
+
|
370 |
+
def _convert_multigraph(G, weight, is_directed):
    """Collapse a multigraph into a simple (Di)Graph, summing parallel edge weights."""
    H = nx.DiGraph() if is_directed else nx.Graph()
    H.add_nodes_from(G)
    for u, v, wt in G.edges(data=weight, default=1):
        if H.has_edge(u, v):
            # Parallel edge: accumulate its weight onto the existing edge.
            H[u][v]["weight"] += wt
        else:
            H.add_edge(u, v, weight=wt)
    return H
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/lukes.py
ADDED
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Lukes Algorithm for exact optimal weighted tree partitioning."""
|
2 |
+
|
3 |
+
from copy import deepcopy
|
4 |
+
from functools import lru_cache
|
5 |
+
from random import choice
|
6 |
+
|
7 |
+
import networkx as nx
|
8 |
+
from networkx.utils import not_implemented_for
|
9 |
+
|
10 |
+
__all__ = ["lukes_partitioning"]
|
11 |
+
|
12 |
+
D_EDGE_W = "weight"
|
13 |
+
D_EDGE_VALUE = 1.0
|
14 |
+
D_NODE_W = "weight"
|
15 |
+
D_NODE_VALUE = 1
|
16 |
+
PKEY = "partitions"
|
17 |
+
CLUSTER_EVAL_CACHE_SIZE = 2048
|
18 |
+
|
19 |
+
|
20 |
+
def _split_n_from(n, min_size_of_first_part):
|
21 |
+
# splits j in two parts of which the first is at least
|
22 |
+
# the second argument
|
23 |
+
assert n >= min_size_of_first_part
|
24 |
+
for p1 in range(min_size_of_first_part, n + 1):
|
25 |
+
yield p1, n - p1
|
26 |
+
|
27 |
+
|
28 |
+
@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight")
def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
    """Optimal partitioning of a weighted tree using the Lukes algorithm.

    Partitions a connected, acyclic graph with integer node weights and
    float edge weights into clusters whose total node weight does not
    exceed `max_size`, minimizing the total weight of cut edges.  Based
    on [1]_.

    Parameters
    ----------
    G : NetworkX graph

    max_size : int
        Maximum weight a partition can have in terms of sum of
        node_weight for all nodes in the partition.

    edge_weight : key
        Edge data key to use as weight. If None, the weights are all
        set to one.

    node_weight : key
        Node data key to use as weight. If None, the weights are all
        set to one. The data must be int.

    Returns
    -------
    partition : list
        A list of sets of nodes representing the clusters of the
        partition.

    Raises
    ------
    NotATree
        If G is not a tree.
    TypeError
        If any of the values of node_weight is not int.

    References
    ----------
    .. [1] Lukes, J. A. (1974).
       "Efficient Algorithm for the Partitioning of Trees."
       IBM Journal of Research and Development, 18(3), 217–224.

    """
    # Sanity check: Lukes only works on trees.  Root the tree (directed
    # input must already have a unique root; undirected input is rooted
    # at a random node via dfs_tree).
    if not nx.is_tree(G):
        raise nx.NotATree("lukes_partitioning works only on trees")
    else:
        if nx.is_directed(G):
            root = [n for n, d in G.in_degree() if d == 0]
            assert len(root) == 1
            root = root[0]
            t_G = deepcopy(G)
        else:
            root = choice(list(G.nodes))
            # dfs_tree has the desirable side effect of not inheriting attributes
            t_G = nx.dfs_tree(G, root)

    # Never mutate the caller's graph: when default weight attributes must
    # be injected, work on a deepcopy instead.
    if edge_weight is None or node_weight is None:
        safe_G = deepcopy(G)
        if edge_weight is None:
            nx.set_edge_attributes(safe_G, D_EDGE_VALUE, D_EDGE_W)
            edge_weight = D_EDGE_W
        if node_weight is None:
            nx.set_node_attributes(safe_G, D_NODE_VALUE, D_NODE_W)
            node_weight = D_NODE_W
    else:
        safe_G = G

    # Node weights MUST be exact ints: partial solutions are indexed by
    # accumulated weight, so duck-typed numerics would invite subtle bugs.
    for val in nx.get_node_attributes(safe_G, node_weight).values():
        if not isinstance(val, int):
            raise TypeError(
                "lukes_partitioning needs integer "
                f"values for node_weight ({node_weight})"
            )

    # SUBROUTINES -----------------------
    # Defined locally so they can close over safe_G / weight keys and so
    # their hashable signatures can be lru_cached.

    @not_implemented_for("undirected")
    # intended to be called only on t_G
    def _leaves(gr):
        for cand in gr.nodes:
            if not nx.descendants(gr, cand):
                yield cand

    @not_implemented_for("undirected")
    def _a_parent_of_leaves_only(gr):
        leaf_set = set(_leaves(gr))
        for cand in set(gr.nodes) - leaf_set:
            if all(d in leaf_set for d in nx.descendants(gr, cand)):
                return cand

    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
    def _value_of_cluster(cluster):
        inner_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster]
        return sum(safe_G.edges[e][edge_weight] for e in inner_edges)

    def _value_of_partition(partition):
        return sum(_value_of_cluster(frozenset(c)) for c in partition)

    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
    def _weight_of_cluster(cluster):
        return sum(safe_G.nodes[n][node_weight] for n in cluster)

    def _pivot(partition, node):
        # the unique cluster of `partition` that contains `node`
        matches = [c for c in partition if node in c]
        assert len(matches) == 1
        return matches[0]

    def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
        ccx = _pivot(partition_1, x)
        cci = _pivot(partition_2, i)
        merged = ccx.union(cci)

        # Merge the two pivot clusters if the weight budget allows it;
        # otherwise just concatenate the two partitions.
        if _weight_of_cluster(frozenset(merged)) <= ref_weight:
            rest_1 = [c for c in partition_1 if c != ccx]
            rest_2 = [c for c in partition_2 if c != cci]
            merged_option = [merged] + rest_1 + rest_2
            return merged_option, _value_of_partition(merged_option)
        else:
            concat_option = partition_1 + partition_2
            return concat_option, _value_of_partition(concat_option)

    # INITIALIZATION -----------------------
    leaves = set(_leaves(t_G))
    for leaf in leaves:
        t_G.nodes[leaf][PKEY] = {}
        slot = safe_G.nodes[leaf][node_weight]
        t_G.nodes[leaf][PKEY][slot] = [{leaf}]
        t_G.nodes[leaf][PKEY][0] = [{leaf}]

    for internal in [n for n in t_G.nodes if n not in leaves]:
        t_G.nodes[internal][PKEY] = {}
        slot = safe_G.nodes[internal][node_weight]
        t_G.nodes[internal][PKEY][slot] = [{internal}]
    nx._clear_cache(t_G)

    # CORE ALGORITHM -----------------------
    # Repeatedly pick a node whose descendants are all leaves, fold the
    # best partial partitions of its subtree into it, then prune the
    # descendants, until only the root is left.
    while True:
        x_node = _a_parent_of_leaves_only(t_G)
        weight_of_x = safe_G.nodes[x_node][node_weight]
        best_value = 0
        best_partition = None
        bp_buffer = {}
        x_descendants = nx.descendants(t_G, x_node)
        for i_node in x_descendants:
            for j in range(weight_of_x, max_size + 1):
                for a, b in _split_n_from(j, weight_of_x):
                    if (
                        a not in t_G.nodes[x_node][PKEY]
                        or b not in t_G.nodes[i_node][PKEY]
                    ):
                        # this particular weight sum is not attainable
                        continue

                    part1 = t_G.nodes[x_node][PKEY][a]
                    part2 = t_G.nodes[i_node][PKEY][b]
                    part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j)

                    if j not in bp_buffer or bp_buffer[j][1] < value:
                        # remember the best partition seen for weight j ...
                        bp_buffer[j] = part, value

                        # ... and the overall best partition
                        if best_value <= value:
                            best_value = value
                            best_partition = part

            # As in Lukes' paper, once a child is processed its partial
            # results are discharged into x_node ("make all x == x'") so
            # the remaining children build on them.
            for w, (best_part_for_vl, vl) in bp_buffer.items():
                t_G.nodes[x_node][PKEY][w] = best_part_for_vl
            bp_buffer.clear()

        # By convention the absolute best partition for this node, across
        # all weights, is stored at slot 0.
        t_G.nodes[x_node][PKEY][0] = best_partition
        t_G.remove_nodes_from(x_descendants)

        if x_node == root:
            # the 0-labeled partition of the root is optimal for the tree
            return t_G.nodes[root][PKEY][0]
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/modularity_max.py
ADDED
@@ -0,0 +1,451 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Functions for detecting communities based on modularity."""
|
2 |
+
|
3 |
+
from collections import defaultdict
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
from networkx.algorithms.community.quality import modularity
|
7 |
+
from networkx.utils import not_implemented_for
|
8 |
+
from networkx.utils.mapped_queue import MappedQueue
|
9 |
+
|
10 |
+
__all__ = [
|
11 |
+
"greedy_modularity_communities",
|
12 |
+
"naive_greedy_modularity_communities",
|
13 |
+
]
|
14 |
+
|
15 |
+
|
16 |
+
def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
    r"""Yield community partitions of G and the modularity change at each step.

    Implements Clauset-Newman-Moore greedy modularity maximization [2]_:
    start with every node in its own community and repeatedly merge the
    pair of communities giving the largest modularity increase, until one
    community contains all nodes.  The generator alternates between
    yielding the current partition (a dict_values of frozensets; the first
    yield is the singleton partition) and yielding the modularity change
    ``dq`` of the upcoming merge.

    This maximizes generalized modularity with resolution parameter
    `resolution` (often written $\gamma$); see
    :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Yields
    ------
    Alternating yields: a dict_values of frozensets of nodes (one frozenset
    per community), then the float modularity change of the next merge.

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    directed = G.is_directed()
    N = G.number_of_nodes()

    # Total edge count (or total edge weight for weighted graphs).
    m = G.size(weight)
    q0 = 1 / m

    # Degree fractions (notation from the papers):
    # a: fraction of (weighted) out-degree, b: fraction of (weighted) in-degree.
    if directed:
        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
    else:
        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}

    # Collect pairwise edge weights; works for graph, digraph and multigraph.
    dq_dict = defaultdict(lambda: defaultdict(float))
    for u, v, wt in G.edges(data=weight, default=1):
        if u == v:
            continue
        dq_dict[u][v] += wt
        dq_dict[v][u] += wt

    # Scale and subtract the expected edge-weight term.
    for u, nbr_weights in dq_dict.items():
        for v, wt in nbr_weights.items():
            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])

    # Store -dq so the min-heaps act as max-heaps.
    # One per-node heap of that node's candidate merges ...
    dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G}
    # ... and a global heap H holding each node's best candidate.
    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])

    # Start from singleton communities.
    communities = {n: frozenset([n]) for n in G}
    yield communities.values()

    # Repeatedly merge the pair of communities with the largest dq.
    while len(H) > 1:
        # Best merge overall; ties break toward the lowest community id.
        try:
            negdq, u, v = H.pop()
        except IndexError:
            break
        dq = -negdq
        yield dq
        # Drop the chosen merge from u's heap and repair H with u's new best.
        dq_heap[u].pop()
        if len(dq_heap[u]) > 0:
            H.push(dq_heap[u].heap[0])
        # The mirror entry (v, u) may also sit at the root of v's heap,
        # in which case its duplicate in H must be removed too.
        if dq_heap[v].heap[0] == (v, u):
            H.remove((v, u))
            dq_heap[v].remove((v, u))
            if len(dq_heap[v]) > 0:
                H.push(dq_heap[v].heap[0])
        else:
            dq_heap[v].remove((v, u))

        # Merge community u into community v.
        communities[v] = frozenset(communities[u] | communities[v])
        del communities[u]

        # Communities adjacent to either of the merged pair.
        nbrs_u = set(dq_dict[u])
        nbrs_v = set(dq_dict[v])
        affected = (nbrs_u | nbrs_v) - {u, v}
        shared = nbrs_u & nbrs_v
        # Recompute dq toward every affected community w.
        for w in affected:
            if w in shared:
                dq_vw = dq_dict[v][w] + dq_dict[u][w]
            elif w in nbrs_v:
                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u])
            else:  # w adjacent to u only
                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v])
            # Write the new value into rows v and w (row u is purged below).
            for row, col in [(v, w), (w, v)]:
                row_heap = dq_heap[row]
                dq_dict[row][col] = dq_vw
                # Remember the old per-row max to detect whether H changes.
                old_max = row_heap.heap[0] if len(row_heap) > 0 else None
                entry = (row, col)
                neg_priority = -dq_vw
                if w in nbrs_v:
                    # Entry already exists in the per-row heap: update it.
                    row_heap.update(entry, entry, priority=neg_priority)
                else:
                    # Brand-new nonzero entry: push it.
                    row_heap.push(entry, priority=neg_priority)
                if old_max is None:
                    # Row was empty before; its new max goes straight into H.
                    H.push(entry, priority=neg_priority)
                else:
                    # The row max may have changed; sync H if so.
                    new_max = row_heap.heap[0]
                    if old_max != new_max or old_max.priority != new_max.priority:
                        H.update(old_max, new_max)

        # Purge row and column u from the dq matrix and the heaps.
        for w in dq_dict[u]:
            del dq_dict[w][u]
            if w != v:  # (v, u) entries were already removed above
                for row, col in [(w, u), (u, w)]:
                    row_heap = dq_heap[row]
                    stale = (row, col)
                    if row_heap.heap[0] == stale:
                        # Removing the row max: repair H as well.
                        row_heap.remove(stale)
                        H.remove(stale)
                        if len(row_heap) > 0:
                            H.push(row_heap.heap[0])
                    else:
                        # Only the per-row heap needs updating.
                        row_heap.remove(stale)

        del dq_dict[u]
        # Keep an empty placeholder so dq_heap stays keyed by every node.
        dq_heap[u] = MappedQueue()
        # Fold u's degree fractions into v.
        a[v] += a[u]
        a[u] = 0
        if directed:
            b[v] += b[u]
            b[u] = 0

        yield communities.values()
|
224 |
+
|
225 |
+
|
226 |
+
@nx._dispatchable(edge_attrs="weight")
def greedy_modularity_communities(
    G,
    weight=None,
    resolution=1,
    cutoff=1,
    best_n=None,
):
    r"""Find communities in G using greedy modularity maximization.

    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_
    to find the community partition with the largest modularity.

    Greedy modularity maximization begins with each node in its own community
    and repeatedly joins the pair of communities that lead to the largest
    modularity until no further increase in modularity is possible (a maximum).
    Two keyword arguments adjust the stopping condition. `cutoff` is a lower
    limit on the number of communities so you can stop the process before
    reaching a maximum (used to save computation time). `best_n` is an upper
    limit on the number of communities so you can make the process continue
    until at most n communities remain even if the maximum modularity occurs
    for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight.  If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float, optional (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    cutoff : int, optional (default=1)
        A minimum number of communities below which the merging process stops.
        The process stops at this number of communities even if modularity
        is not maximized. The goal is to let the user stop the process early.
        The process stops before the cutoff if it finds a maximum of modularity.

    best_n : int or None, optional (default=None)
        A maximum number of communities above which the merging process will
        not stop. This forces community merging to continue after modularity
        starts to decrease until `best_n` communities remain.
        If ``None``, don't force it to continue beyond a maximum.

    Raises
    ------
    ValueError : If the `cutoff` or `best_n` value is not in the range
        ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.

    Returns
    -------
    communities: list
        A list of frozensets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    # An edgeless graph cannot be merged at all: each node is its own community.
    if not G.size():
        return [{n} for n in G]

    if (cutoff < 1) or (cutoff > G.number_of_nodes()):
        raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.")
    if best_n is not None:
        if (best_n < 1) or (best_n > G.number_of_nodes()):
            raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.")
        if best_n < cutoff:
            raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}")
        # best_n == 1 forces everything into a single community; no merging needed.
        if best_n == 1:
            return [set(G)]
    else:
        # No upper bound requested: never force merges past the modularity maximum.
        best_n = G.number_of_nodes()

    # retrieve generator object to construct output
    community_gen = _greedy_modularity_communities_generator(
        G, weight=weight, resolution=resolution
    )

    # construct the first best community
    communities = next(community_gen)

    # continue merging communities until one of the breaking criteria is satisfied
    while len(communities) > cutoff:
        try:
            # The generator alternates: it yields the modularity change (dq) of
            # the next best merge, then the resulting community partition.
            dq = next(community_gen)
        # StopIteration occurs when communities are the connected components
        except StopIteration:
            communities = sorted(communities, key=len, reverse=True)
            # if best_n requires more merging, merge big sets for highest modularity
            while len(communities) > best_n:
                # Communities are disjoint, so ^ (symmetric difference) here is
                # equivalent to union of the two largest communities.
                comm1, comm2, *rest = communities
                communities = [comm1 ^ comm2]
                communities.extend(rest)
            return communities

        # keep going unless max_mod is reached or best_n says to merge more
        if dq < 0 and len(communities) <= best_n:
            break
        communities = next(community_gen)

    return sorted(communities, key=len, reverse=True)
|
355 |
+
|
356 |
+
|
357 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def naive_greedy_modularity_communities(G, resolution=1, weight=None):
    r"""Find communities in G using greedy modularity maximization.

    This implementation is O(n^4), much slower than alternatives, but it is
    provided as an easy-to-understand reference implementation.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be simple and undirected.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight.  If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    list
        A list of sets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> c = nx.community.naive_greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    greedy_modularity_communities
    modularity
    """
    # First create one community for each node
    communities = [frozenset([u]) for u in G.nodes()]
    # Track merges
    merges = []
    # Greedily merge communities until no improvement is possible
    old_modularity = None
    new_modularity = modularity(G, communities, resolution=resolution, weight=weight)
    while old_modularity is None or new_modularity > old_modularity:
        # Save modularity for comparison
        old_modularity = new_modularity
        # Find best pair to merge
        trial_communities = list(communities)
        to_merge = None
        for i, u in enumerate(communities):
            for j, v in enumerate(communities):
                # Skip i==j and empty communities
                if j <= i or len(u) == 0 or len(v) == 0:
                    continue
                # Merge communities u and v
                trial_communities[j] = u | v
                trial_communities[i] = frozenset([])
                trial_modularity = modularity(
                    G, trial_communities, resolution=resolution, weight=weight
                )
                if trial_modularity >= new_modularity:
                    # Check if strictly better or tie
                    if trial_modularity > new_modularity:
                        # Found new best, save modularity and group indexes
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                    elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]):
                        # Break ties by choosing pair with lowest min id
                        new_modularity = trial_modularity
                        to_merge = (i, j, new_modularity - old_modularity)
                # Un-merge so trial_communities can be reused for the next pair
                trial_communities[i] = u
                trial_communities[j] = v
        if to_merge is not None:
            # If the best merge improves modularity, use it
            merges.append(to_merge)
            i, j, dq = to_merge
            u, v = communities[i], communities[j]
            communities[j] = u | v
            # Leave an empty placeholder at i so other indices stay stable
            communities[i] = frozenset([])
    # Remove empty communities and sort
    return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/quality.py
ADDED
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Functions for measuring the quality of a partition (into
|
2 |
+
communities).
|
3 |
+
|
4 |
+
"""
|
5 |
+
|
6 |
+
from itertools import combinations
|
7 |
+
|
8 |
+
import networkx as nx
|
9 |
+
from networkx import NetworkXError
|
10 |
+
from networkx.algorithms.community.community_utils import is_partition
|
11 |
+
from networkx.utils.decorators import argmap
|
12 |
+
|
13 |
+
__all__ = ["modularity", "partition_quality"]
|
14 |
+
|
15 |
+
|
16 |
+
class NotAPartition(NetworkXError):
    """Raised if a given collection is not a partition."""

    def __init__(self, G, collection):
        super().__init__(f"{collection} is not a valid partition of the graph {G}")
|
22 |
+
|
23 |
+
|
24 |
+
def _require_partition(G, partition):
    """Decorator to check that a valid partition is input to a function

    Raises :exc:`networkx.NetworkXError` if the partition is not valid.

    This decorator should be used on functions whose first two arguments
    are a graph and a partition of the nodes of that graph (in that
    order)::

        >>> @require_partition
        ... def foo(G, partition):
        ...     print("partition is valid!")
        ...
        >>> G = nx.complete_graph(5)
        >>> partition = [{0, 1}, {2, 3}, {4}]
        >>> foo(G, partition)
        partition is valid!
        >>> partition = [{0}, {2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
        >>> partition = [{0, 1}, {1, 2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G

    """
    # argmap transforms the first two arguments; returning them unchanged
    # means a valid partition passes through untouched.
    if is_partition(G, partition):
        return G, partition
    raise nx.NetworkXError("`partition` is not a valid partition of the nodes of G")


# Bind the check to argument positions 0 (graph) and 1 (partition) so it can
# be applied as a decorator.
require_partition = argmap(_require_partition, (0, 1))
|
59 |
+
|
60 |
+
|
61 |
+
@nx._dispatchable
def intra_community_edges(G, partition):
    """Returns the number of intra-community edges for a partition of `G`.

    An *intra-community edge* joins two nodes that belong to the same
    block of the partition.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    """
    # Each block's induced subgraph contains exactly the edges internal
    # to that block, so summing their sizes counts intra-community edges.
    total = 0
    for block in partition:
        total += G.subgraph(block).size()
    return total
|
77 |
+
|
78 |
+
|
79 |
+
@nx._dispatchable
def inter_community_edges(G, partition):
    """Returns the number of inter-community edges according to the given
    partition of the nodes of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The *inter-community edges* are those edges joining a pair of nodes
    in different blocks of the partition.

    Implementation note: this function creates an intermediate graph
    that may require the same amount of memory as that of `G`.

    """
    # Alternate implementation that does not require constructing a new
    # graph object (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in G.edges() if aff[u] != aff[v])
    #
    # A multigraph quotient keeps one edge per original inter-block edge
    # (a simple quotient graph would collapse parallel inter-block edges).
    MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
    return nx.quotient_graph(G, partition, create_using=MG).size()
|
109 |
+
|
110 |
+
|
111 |
+
@nx._dispatchable
def inter_community_non_edges(G, partition):
    """Returns the number of inter-community non-edges according to the
    given partition of the nodes of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
    that are not adjacent in `G`. The *inter-community non-edges* are
    those non-edges on a pair of nodes in different blocks of the
    partition.

    Implementation note: this function creates two intermediate graphs,
    which may require up to twice the amount of memory as required to
    store `G`.

    """
    # Alternate implementation that does not require constructing two
    # new graph objects (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
    #
    # A non-edge of G is an edge of its complement, so inter-community
    # non-edges of G are exactly inter-community edges of the complement.
    return inter_community_edges(nx.complement(G), partition)
|
142 |
+
|
143 |
+
|
144 |
+
@nx._dispatchable(edge_attrs="weight")
def modularity(G, communities, weight="weight", resolution=1):
    r"""Returns the modularity of the given partition of the graph.

    Modularity is defined in [1]_ as

    .. math::
        Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \gamma\frac{k_ik_j}{2m}\right)
            \delta(c_i,c_j)

    where $m$ is the number of edges (or sum of all edge weights as in [5]_),
    $A$ is the adjacency matrix of `G`, $k_i$ is the (weighted) degree of $i$,
    $\gamma$ is the resolution parameter, and $\delta(c_i, c_j)$ is 1 if $i$ and
    $j$ are in the same community else 0.

    According to [2]_ (and verified by some algebra) this can be reduced to

    .. math::
       Q = \sum_{c=1}^{n}
           \left[ \frac{L_c}{m} - \gamma\left( \frac{k_c}{2m} \right) ^2 \right]

    where the sum iterates over all communities $c$, $m$ is the number of edges,
    $L_c$ is the number of intra-community links for community $c$,
    $k_c$ is the sum of degrees of the nodes in community $c$,
    and $\gamma$ is the resolution parameter.

    The resolution parameter sets an arbitrary tradeoff between intra-group
    edges and inter-group edges. More complex grouping patterns can be
    discovered by analyzing the same network with multiple values of gamma
    and then combining the results [3]_. That said, it is very common to
    simply use gamma=1. More on the choice of gamma is in [4]_.

    The second formula is the one actually used in calculation of the modularity.
    For directed graphs the second formula replaces $k_c$ with $k^{in}_c k^{out}_c$.

    Parameters
    ----------
    G : NetworkX Graph

    communities : list or iterable of set of nodes
        These node sets must represent a partition of G's nodes.

    weight : string or None, optional (default="weight")
        The edge attribute that holds the numerical value used
        as a weight. If None or an edge does not have that attribute,
        then that edge has weight 1.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Returns
    -------
    Q : float
        The modularity of the partition.

    Raises
    ------
    NotAPartition
        If `communities` is not a partition of the nodes of `G`.

    Examples
    --------
    >>> G = nx.barbell_graph(3, 0)
    >>> nx.community.modularity(G, [{0, 1, 2}, {3, 4, 5}])
    0.35714285714285715
    >>> nx.community.modularity(G, nx.community.label_propagation_communities(G))
    0.35714285714285715

    References
    ----------
    .. [1] M. E. J. Newman "Networks: An Introduction", page 224.
       Oxford University Press, 2011.
    .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
       "Finding community structure in very large networks."
       Phys. Rev. E 70.6 (2004). <https://arxiv.org/abs/cond-mat/0408187>
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community Detection"
       Phys. Rev. E 74, 016110, 2006. https://doi.org/10.1103/PhysRevE.74.016110
    .. [4] M. E. J. Newman, "Equivalence between modularity optimization and
       maximum likelihood methods for community detection"
       Phys. Rev. E 94, 052315, 2016. https://doi.org/10.1103/PhysRevE.94.052315
    .. [5] Blondel, V.D. et al. "Fast unfolding of communities in large
       networks" J. Stat. Mech 10008, 1-12 (2008).
       https://doi.org/10.1088/1742-5468/2008/10/P10008
    """
    # A generator of communities can only be iterated once; materialize it so
    # the partition check and the contribution sum both see every community.
    if not isinstance(communities, list):
        communities = list(communities)
    if not is_partition(G, communities):
        raise NotAPartition(G, communities)

    directed = G.is_directed()
    if directed:
        out_degree = dict(G.out_degree(weight=weight))
        in_degree = dict(G.in_degree(weight=weight))
        # For directed graphs, m is the total edge weight and norm = 1/m^2.
        m = sum(out_degree.values())
        norm = 1 / m**2
    else:
        out_degree = in_degree = dict(G.degree(weight=weight))
        # deg_sum is 2m (each undirected edge counted from both endpoints),
        # so norm = 1/(2m)^2 as in the formula above.
        deg_sum = sum(out_degree.values())
        m = deg_sum / 2
        norm = 1 / deg_sum**2

    def community_contribution(community):
        # One term of the sum over communities: L_c/m - gamma*(k_c/2m)^2.
        comm = set(community)
        # Intra-community edge weight: edges incident to comm whose other
        # endpoint is also inside comm.
        L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm)

        out_degree_sum = sum(out_degree[u] for u in comm)
        in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum

        return L_c / m - resolution * out_degree_sum * in_degree_sum * norm

    return sum(map(community_contribution, communities))
|
256 |
+
|
257 |
+
|
258 |
+
@require_partition
@nx._dispatchable
def partition_quality(G, partition):
    """Returns the coverage and performance of a partition of G.

    The *coverage* of a partition is the ratio of the number of
    intra-community edges to the total number of edges in the graph.

    The *performance* of a partition is the number of
    intra-community edges plus inter-community non-edges divided by the total
    number of potential edges.

    This algorithm has complexity $O(C^2 + L)$ where C is the number of communities and L is the number of links.

    Parameters
    ----------
    G : NetworkX graph

    partition : sequence
        Partition of the nodes of `G`, represented as a sequence of
        sets of nodes (blocks). Each block of the partition represents a
        community.

    Returns
    -------
    (float, float)
        The (coverage, performance) tuple of the partition, as defined above.

    Raises
    ------
    NetworkXError
        If `partition` is not a valid partition of the nodes of `G`.

    Notes
    -----
    If `G` is a multigraph;
    - for coverage, the multiplicity of edges is counted
    - for performance, the result is -1 (total number of possible edges is not defined)

    References
    ----------
    .. [1] Santo Fortunato.
           "Community Detection in Graphs".
           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
           <https://arxiv.org/abs/0906.0612>
    """

    # Map each node to the index of its community for O(1) same-community tests.
    node_community = {}
    for i, community in enumerate(partition):
        for node in community:
            node_community[node] = i

    # `performance` is not defined for multigraphs
    if not G.is_multigraph():
        # Iterate over the communities, quadratic, to calculate `possible_inter_community_edges`
        possible_inter_community_edges = sum(
            len(p1) * len(p2) for p1, p2 in combinations(partition, 2)
        )

        # Each unordered node pair yields two directed edges.
        if G.is_directed():
            possible_inter_community_edges *= 2
    else:
        possible_inter_community_edges = 0

    # Compute the number of edges in the complete graph -- `n` nodes,
    # directed or undirected, depending on `G`
    n = len(G)
    total_pairs = n * (n - 1)
    if not G.is_directed():
        total_pairs //= 2

    intra_community_edges = 0
    # Start from all possible inter-community pairs and subtract each
    # actual inter-community edge, leaving the non-edges.
    inter_community_non_edges = possible_inter_community_edges

    # Iterate over the links to count `intra_community_edges` and `inter_community_non_edges`
    for e in G.edges():
        if node_community[e[0]] == node_community[e[1]]:
            intra_community_edges += 1
        else:
            inter_community_non_edges -= 1

    coverage = intra_community_edges / len(G.edges)

    if G.is_multigraph():
        performance = -1.0
    else:
        performance = (intra_community_edges + inter_community_non_edges) / total_pairs

    return coverage, performance
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__init__.py
ADDED
File without changes
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (203 Bytes). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-310.pyc
ADDED
Binary file (3.31 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc
ADDED
Binary file (3.38 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-310.pyc
ADDED
Binary file (3.71 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-310.pyc
ADDED
Binary file (2.95 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-310.pyc
ADDED
Binary file (3.07 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-310.pyc
ADDED
Binary file (10.3 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-310.pyc
ADDED
Binary file (7.33 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc
ADDED
Binary file (3.55 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-310.pyc
ADDED
Binary file (8.08 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc
ADDED
Binary file (4.67 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-310.pyc
ADDED
Binary file (1.09 kB). View file
|
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
ADDED
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
from networkx import Graph, NetworkXError
|
5 |
+
from networkx.algorithms.community import asyn_fluidc
|
6 |
+
|
7 |
+
|
8 |
+
@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
def test_raises_on_directed_and_multigraphs(graph_constructor):
    """asyn_fluidc is not implemented for directed graphs or multigraphs."""
    G = graph_constructor()
    G.add_edges_from([(0, 1), (1, 2)])
    with pytest.raises(nx.NetworkXNotImplemented):
        nx.community.asyn_fluidc(G, 1)
|
13 |
+
|
14 |
+
|
15 |
+
def test_exceptions():
    """asyn_fluidc rejects invalid k values and disconnected graphs."""
    G = Graph()
    G.add_node("a")
    # k must be a positive integer no larger than the number of nodes.
    for bad_k in ("hi", -1, 3):
        with pytest.raises(NetworkXError):
            asyn_fluidc(G, bad_k)
    # Adding a second, disconnected node makes the graph invalid even for k=1.
    G.add_node("b")
    with pytest.raises(NetworkXError):
        asyn_fluidc(G, 1)
|
23 |
+
|
24 |
+
|
25 |
+
def test_single_node():
    """A single isolated node forms exactly one singleton community."""
    G = Graph()
    G.add_node("a")

    expected = {frozenset(["a"])}
    found = {frozenset(c) for c in asyn_fluidc(G, 1)}
    assert found == expected
|
36 |
+
|
37 |
+
|
38 |
+
def test_two_nodes():
    """Two adjacent nodes split into two singleton communities for k=2."""
    G = Graph()
    G.add_edge("a", "b")

    expected = {frozenset(["a"]), frozenset(["b"])}
    found = {frozenset(c) for c in asyn_fluidc(G, 2)}
    assert found == expected
|
49 |
+
|
50 |
+
|
51 |
+
def test_two_clique_communities():
    """Two triangles joined by a single bridge are split along the bridge."""
    G = Graph()

    # First triangle.
    G.add_edges_from([("a", "b"), ("a", "c"), ("b", "c")])

    # Bridge between the cliques.
    G.add_edge("c", "d")

    # Second triangle.
    G.add_edges_from([("d", "e"), ("d", "f"), ("f", "e")])

    expected = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])}

    found = {frozenset(c) for c in asyn_fluidc(G, 2, seed=7)}
    assert found == expected
|
73 |
+
|
74 |
+
|
75 |
+
def test_five_clique_ring():
    """Five 4-cliques joined in a ring are recovered as five communities."""
    G = Graph()

    # Build five disjoint 4-cliques labelled "1".."5" with members a..d.
    # Edge insertion order matches the original test: ab, ac, ad, bc, bd, cd.
    for prefix in "12345":
        members = [prefix + suffix for suffix in "abcd"]
        for left in range(len(members)):
            for right in range(left + 1, len(members)):
                G.add_edge(members[left], members[right])

    # Ring connections between consecutive cliques.
    G.add_edge("1a", "2c")
    G.add_edge("2a", "3c")
    G.add_edge("3a", "4c")
    G.add_edge("4a", "5c")
    G.add_edge("5a", "1c")

    expected = {
        frozenset(prefix + suffix for suffix in "abcd") for prefix in "12345"
    }

    found = {frozenset(c) for c in asyn_fluidc(G, 5, seed=9)}
    assert found == expected
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_centrality.py
ADDED
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Unit tests for the :mod:`networkx.algorithms.community.centrality`
|
2 |
+
module.
|
3 |
+
|
4 |
+
"""
|
5 |
+
from operator import itemgetter
|
6 |
+
|
7 |
+
import networkx as nx
|
8 |
+
|
9 |
+
|
10 |
+
def set_of_sets(iterable):
    """Return the members of *iterable* as a set of frozensets."""
    return {frozenset(s) for s in iterable}
|
12 |
+
|
13 |
+
|
14 |
+
def validate_communities(result, expected):
    """Assert that *result* and *expected* describe the same partition."""
    assert {frozenset(c) for c in result} == {frozenset(c) for c in expected}
|
16 |
+
|
17 |
+
|
18 |
+
def validate_possible_communities(result, *expected):
    """Assert that *result* matches at least one candidate partition."""
    found = {frozenset(c) for c in result}
    assert any(found == {frozenset(c) for c in cand} for cand in expected)
|
20 |
+
|
21 |
+
|
22 |
+
class TestGirvanNewman:
    """Unit tests for
    :func:`networkx.algorithms.community.centrality.girvan_newman`.
    """

    def _check_four_node_path_levels(self, G):
        # A 4-node path shape (possibly directed or with self-loops)
        # always produces three removal levels in a fixed pattern.
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 3
        # After one removal, we get the graph .-. .-.
        validate_communities(levels[0], [{0, 1}, {2, 3}])
        # After the next removal there are two symmetric possibilities.
        validate_possible_communities(
            levels[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        # After the last removal, we always get the empty graph.
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])

    def test_no_edges(self):
        G = nx.empty_graph(3)
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 1
        validate_communities(levels[0], [{0}, {1}, {2}])

    def test_undirected(self):
        # Start with the graph .-.-.-.
        self._check_four_node_path_levels(nx.path_graph(4))

    def test_directed(self):
        self._check_four_node_path_levels(nx.DiGraph(nx.path_graph(4)))

    def test_selfloops(self):
        G = nx.path_graph(4)
        G.add_edge(0, 0)
        G.add_edge(2, 2)
        self._check_four_node_path_levels(G)

    def test_most_valuable_edge(self):
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])

        def heaviest(G):
            # Treat the highest-weight edge as the most valuable one.
            return max(G.edges(data="weight"), key=itemgetter(2))[:2]

        levels = list(nx.community.girvan_newman(G, heaviest))
        assert len(levels) == 3
        validate_communities(levels[0], [{0}, {1, 2, 3}])
        validate_communities(levels[1], [{0}, {1}, {2, 3}])
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_divisive.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
|
6 |
+
def test_edge_betweenness_partition():
    """Edge-betweenness partitioning splits barbell graphs at their bridges."""

    def check(G, k, expected):
        parts = nx.community.edge_betweenness_partition(G, k)
        assert len(parts) == len(expected)
        for block in expected:
            assert block in parts

    G = nx.barbell_graph(3, 0)
    check(G, 2, [{0, 1, 2}, {3, 4, 5}])

    G = nx.barbell_graph(3, 1)
    check(G, 3, [{0, 1, 2}, {4, 5, 6}, {3}])
    check(G, 7, [{n} for n in G])

    # k=1 always returns the whole node set, weighted or not.
    assert nx.community.edge_betweenness_partition(G, 1) == [set(G)]
    assert nx.community.edge_betweenness_partition(G, 1, weight="weight") == [set(G)]

    # Out-of-range community counts are rejected.
    for bad_k in (0, -1, 10):
        with pytest.raises(nx.NetworkXError):
            nx.community.edge_betweenness_partition(G, bad_k)
|
41 |
+
|
42 |
+
|
43 |
+
def test_edge_current_flow_betweenness_partition():
    """Current-flow betweenness partitions of barbell and empty graphs."""
    pytest.importorskip("scipy")

    def check_exact(G, k, expected):
        parts = nx.community.edge_current_flow_betweenness_partition(G, k)
        assert len(parts) == len(expected)
        for block in expected:
            assert block in parts

    def check_any(G, k, possibilities):
        # Ties may break either way; accept any of the listed outcomes.
        parts = nx.community.edge_current_flow_betweenness_partition(G, k)
        assert len(parts) == len(possibilities[0])
        assert any(all(s in answer for s in parts) for answer in possibilities)

    G = nx.barbell_graph(3, 0)
    check_exact(G, 2, [{0, 1, 2}, {3, 4, 5}])

    G = nx.barbell_graph(3, 1)
    check_any(G, 2, [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]])
    check_exact(G, 3, [{0, 1, 2}, {4, 5, 6}, {3}])
    check_any(G, 4, [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]])
    check_exact(G, 5, [{1, 2}, {5, 6}, {3}, {0}, {4}])
    check_any(
        G, 6, [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]]
    )
    check_exact(G, 7, [{n} for n in G])

    # k=1 always returns the whole node set, weighted or not.
    assert nx.community.edge_current_flow_betweenness_partition(G, 1) == [set(G)]
    assert nx.community.edge_current_flow_betweenness_partition(
        G, 1, weight="weight"
    ) == [set(G)]

    # Out-of-range community counts are rejected.
    for bad_k in (0, -1, 10):
        with pytest.raises(nx.NetworkXError):
            nx.community.edge_current_flow_betweenness_partition(G, bad_k)

    # An edgeless graph always partitions into singletons.
    N = 10
    G = nx.empty_graph(N)
    for k in range(2, N - 1):
        assert nx.community.edge_current_flow_betweenness_partition(G, k) == [
            {n} for n in G
        ]
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kclique.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from itertools import combinations
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
|
7 |
+
|
8 |
+
def test_overlapping_K5():
    """Two overlapping 5-cliques: k=4 merges them, k=5 keeps them apart."""
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))  # Add a five clique
    G.add_edges_from(combinations(range(2, 7), 2))  # Add another five clique
    assert list(nx.community.k_clique_communities(G, 4)) == [frozenset(range(7))]
    assert set(nx.community.k_clique_communities(G, 5)) == {
        frozenset(range(5)),
        frozenset(range(2, 7)),
    }
|
16 |
+
|
17 |
+
|
18 |
+
def test_isolated_K5():
    """Two disjoint 5-cliques stay separate communities at k=5."""
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))  # Add a five clique
    G.add_edges_from(combinations(range(5, 10), 2))  # Add another five clique
    assert set(nx.community.k_clique_communities(G, 5)) == {
        frozenset(range(5)),
        frozenset(range(5, 10)),
    }
|
24 |
+
|
25 |
+
|
26 |
+
class TestZacharyKarateClub:
    """Clique-percolation communities of the Zachary karate club graph."""

    def setup_method(self):
        self.G = nx.karate_club_graph()

    def _check_communities(self, k, expected):
        found = set(nx.community.k_clique_communities(self.G, k))
        assert found == expected

    def test_k2(self):
        # clique percolation with k=2 is just connected components
        self._check_communities(2, {frozenset(self.G)})

    def test_k3(self):
        comm1 = [
            0, 1, 2, 3, 7, 8, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23,
            26, 27, 28, 29, 30, 31, 32, 33,
        ]
        comm2 = [0, 4, 5, 6, 10, 16]
        comm3 = [24, 25, 31]
        self._check_communities(
            3, {frozenset(comm1), frozenset(comm2), frozenset(comm3)}
        )

    def test_k4(self):
        expected = {
            frozenset([0, 1, 2, 3, 7, 13]),
            frozenset([8, 32, 30, 33]),
            frozenset([32, 33, 29, 23]),
        }
        self._check_communities(4, expected)

    def test_k5(self):
        self._check_communities(5, {frozenset([0, 1, 2, 3, 7, 13])})

    def test_k6(self):
        # No community percolates at k=6.
        self._check_communities(6, set())
|
87 |
+
|
88 |
+
|
89 |
+
def test_bad_k():
    """k below 2 is rejected with NetworkXError."""
    with pytest.raises(nx.NetworkXError):
        list(nx.community.k_clique_communities(nx.Graph(), 1))
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Unit tests for the :mod:`networkx.algorithms.community.kernighan_lin`
|
2 |
+
module.
|
3 |
+
"""
|
4 |
+
from itertools import permutations
|
5 |
+
|
6 |
+
import pytest
|
7 |
+
|
8 |
+
import networkx as nx
|
9 |
+
from networkx.algorithms.community import kernighan_lin_bisection
|
10 |
+
|
11 |
+
|
12 |
+
def assert_partition_equal(x, y):
    """Assert that partitions *x* and *y* contain the same blocks."""
    assert {frozenset(block) for block in x} == {frozenset(block) for block in y}
|
14 |
+
|
15 |
+
|
16 |
+
def test_partition():
    """The default bisection splits a barbell graph at its bridge."""
    G = nx.barbell_graph(3, 0)
    assert_partition_equal(kernighan_lin_bisection(G), [{0, 1, 2}, {3, 4, 5}])
|
20 |
+
|
21 |
+
|
22 |
+
def test_partition_argument():
    """An already-optimal starting partition is returned unchanged."""
    G = nx.barbell_graph(3, 0)
    partition = [{0, 1, 2}, {3, 4, 5}]
    assert_partition_equal(kernighan_lin_bisection(G, partition), partition)
|
27 |
+
|
28 |
+
|
29 |
+
def test_partition_argument_non_integer_nodes():
    """The bisection works with arbitrary hashable node labels."""
    G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
    partition = ({"A", "B"}, {"C", "D"})
    assert_partition_equal(kernighan_lin_bisection(G, partition), partition)
|
34 |
+
|
35 |
+
|
36 |
+
def test_seed_argument():
    """A fixed seed gives a deterministic bisection."""
    G = nx.barbell_graph(3, 0)
    assert_partition_equal(
        kernighan_lin_bisection(G, seed=1), [{0, 1, 2}, {3, 4, 5}]
    )
|
40 |
+
|
41 |
+
|
42 |
+
def test_non_disjoint_partition():
    """Overlapping blocks in the initial partition are rejected."""
    with pytest.raises(nx.NetworkXError):
        G = nx.barbell_graph(3, 0)
        kernighan_lin_bisection(G, ({0, 1, 2}, {2, 3, 4, 5}))
|
47 |
+
|
48 |
+
|
49 |
+
def test_too_many_blocks():
    """A bisection accepts exactly two initial blocks, no more."""
    with pytest.raises(nx.NetworkXError):
        G = nx.barbell_graph(3, 0)
        kernighan_lin_bisection(G, ({0, 1}, {2}, {3, 4, 5}))
|
54 |
+
|
55 |
+
|
56 |
+
def test_multigraph():
    """The bisection respects edge multiplicity for any node labeling."""
    base = nx.cycle_graph(4)
    M = nx.MultiGraph(base.edges())
    M.add_edges_from(base.edges())
    M.remove_edge(1, 2)
    # The answer must be label-invariant: check every relabeling.
    for labels in permutations(range(4)):
        mapping = dict(zip(M, labels))
        left, right = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0)
        assert_partition_equal(
            [left, right], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}]
        )
|
67 |
+
|
68 |
+
|
69 |
+
def test_max_iter_argument():
    """With max_iter=1 the refinement stops after a single sweep."""
    G = nx.Graph()
    G.add_weighted_edges_from(
        [
            ("A", "B", 1), ("A", "C", 2), ("A", "D", 3), ("A", "E", 2),
            ("A", "F", 4), ("B", "C", 1), ("B", "D", 4), ("B", "E", 2),
            ("B", "F", 1), ("C", "D", 3), ("C", "E", 2), ("C", "F", 1),
            ("D", "E", 4), ("D", "F", 3), ("E", "F", 2),
        ]
    )
    partition = ({"A", "B", "C"}, {"D", "E", "F"})
    result = kernighan_lin_bisection(G, partition, max_iter=1)
    assert_partition_equal(result, ({"A", "F", "C"}, {"D", "E", "B"}))
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_label_propagation.py
ADDED
@@ -0,0 +1,241 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from itertools import chain, combinations
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
|
7 |
+
|
8 |
+
def test_directed_not_supported():
    """Label propagation rejects directed graphs."""
    with pytest.raises(nx.NetworkXNotImplemented):
        # not supported for directed graphs
        G = nx.DiGraph()
        G.add_edges_from([("a", "b"), ("a", "c"), ("b", "d")])
        nx.community.label_propagation_communities(G)
|
16 |
+
|
17 |
+
|
18 |
+
def test_iterator_vs_iterable():
    """The result is a reusable iterable of communities, not an iterator."""
    G = nx.empty_graph("a")
    assert list(nx.community.label_propagation_communities(G)) == [{"a"}]
    for community in nx.community.label_propagation_communities(G):
        assert community == {"a"}
    # Calling next() on the result directly must fail.
    pytest.raises(TypeError, next, nx.community.label_propagation_communities(G))
|
24 |
+
|
25 |
+
|
26 |
+
def test_one_node():
    """A single isolated node forms its own community."""
    G = nx.Graph()
    G.add_node("a")
    found = {frozenset(c) for c in nx.community.label_propagation_communities(G)}
    assert found == {frozenset(["a"])}
|
36 |
+
|
37 |
+
|
38 |
+
def test_unconnected_communities():
    """Two disjoint triangles are recovered as two communities."""
    G = nx.Graph()
    # community 1
    G.add_edges_from([("a", "c"), ("a", "d"), ("d", "c")])
    # community 2
    G.add_edges_from([("b", "e"), ("e", "f"), ("f", "b")])

    found = {frozenset(c) for c in nx.community.label_propagation_communities(G)}
    assert found == {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])}
|
55 |
+
|
56 |
+
|
57 |
+
def test_connected_communities():
    """Two dense K5s joined by one edge, plus a dyad and an isolated node."""
    G = nx.Graph()
    # community 1: complete graph on a..e (original edge insertion order)
    G.add_edges_from(
        [("a", "b"), ("c", "a"), ("c", "b"), ("d", "a"), ("d", "b"),
         ("d", "c"), ("e", "a"), ("e", "b"), ("e", "c"), ("e", "d")]
    )
    # community 2: complete graph on 1..5
    G.add_edges_from(
        [("1", "2"), ("3", "1"), ("3", "2"), ("4", "1"), ("4", "2"),
         ("4", "3"), ("5", "1"), ("5", "2"), ("5", "3"), ("5", "4")]
    )
    G.add_edge("a", "1")  # edge between community 1 and 2
    G.add_edge("x", "y")  # community 3
    G.add_node("z")  # community 4 with only a single node

    # Depending on tie-breaking the two cliques may or may not merge.
    ground_truth = (
        {
            frozenset(["a", "b", "c", "d", "e"]),
            frozenset(["1", "2", "3", "4", "5"]),
            frozenset(["x", "y"]),
            frozenset(["z"]),
        },
        {
            frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]),
            frozenset(["x", "y"]),
            frozenset(["z"]),
        },
    )

    found = {frozenset(c) for c in nx.community.label_propagation_communities(G)}
    assert found in ground_truth
|
105 |
+
|
106 |
+
|
107 |
+
def test_termination():
    """Regression: these two graphs once made asyn_lpa loop forever."""
    G1 = nx.karate_club_graph()
    G2 = nx.caveman_graph(2, 10)
    G2.add_edges_from([(0, 20), (20, 10)])
    nx.community.asyn_lpa_communities(G1)
    nx.community.asyn_lpa_communities(G2)
|
115 |
+
|
116 |
+
|
117 |
+
class TestAsynLpaCommunities:
    def _check_communities(self, G, expected):
        """Assert that :func:`~networkx.asyn_lpa_communities` on ``G``
        yields exactly the communities in ``expected``.

        ``expected`` must be a :class:`set` of :class:`frozenset`
        instances, each element of which is a node in the graph.
        """
        found = {frozenset(c) for c in nx.community.asyn_lpa_communities(G)}
        assert found == expected

    def test_null_graph(self):
        self._check_communities(nx.null_graph(), set())

    def test_single_node(self):
        self._check_communities(nx.empty_graph(1), {frozenset([0])})

    def test_simple_communities(self):
        # This graph is the disjoint union of two triangles.
        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
        self._check_communities(G, {frozenset("abc"), frozenset("def")})

    def test_seed_argument(self):
        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
        found = {frozenset(c) for c in nx.community.asyn_lpa_communities(G, seed=1)}
        assert found == {frozenset("abc"), frozenset("def")}

    def test_several_communities(self):
        # This graph is the disjoint union of five triangles.
        ground_truth = {frozenset(range(3 * i, 3 * (i + 1))) for i in range(5)}
        edges = chain.from_iterable(combinations(c, 2) for c in ground_truth)
        self._check_communities(nx.Graph(edges), ground_truth)
|
160 |
+
|
161 |
+
|
162 |
+
class TestFastLabelPropagationCommunities:
    N = 100  # number of nodes
    K = 15  # average node degree

    def _check_communities(self, G, truth, weight=None, seed=42):
        # Compare the computed communities against ``truth`` (set of frozensets).
        C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed)
        assert {frozenset(c) for c in C} == truth

    def test_null_graph(self):
        self._check_communities(nx.null_graph(), set())

    def test_empty_graph(self):
        G = nx.empty_graph(self.N)
        self._check_communities(G, {frozenset([v]) for v in G})

    def test_star_graph(self):
        G = nx.star_graph(self.N)
        self._check_communities(G, {frozenset(G)})

    def test_complete_graph(self):
        G = nx.complete_graph(self.N)
        self._check_communities(G, {frozenset(G)})

    def test_bipartite_graph(self):
        G = nx.complete_bipartite_graph(self.N // 2, self.N // 2)
        self._check_communities(G, {frozenset(G)})

    def test_random_graph(self):
        G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42)
        self._check_communities(G, {frozenset(G)})

    def test_disjoin_cliques(self):
        G = nx.Graph(["ab", "AB", "AC", "BC", "12", "13", "14", "23", "24", "34"])
        self._check_communities(
            G, {frozenset("ab"), frozenset("ABC"), frozenset("1234")}
        )

    def test_ring_of_cliques(self):
        N, K = self.N, self.K
        G = nx.ring_of_cliques(N, K)
        truth = {frozenset(range(K * i, K * i + K)) for i in range(N)}
        self._check_communities(G, truth)

    def test_larger_graph(self):
        # smoke test on a larger random instance
        G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42)
        nx.community.fast_label_propagation_communities(G)

    def test_graph_type(self):
        # The result must not depend on the concrete graph class.
        G1 = nx.complete_graph(self.N, nx.MultiDiGraph())
        truth = {frozenset(G1)}
        for G in (G1, nx.MultiGraph(G1), nx.DiGraph(G1), nx.Graph(G1)):
            self._check_communities(G, truth)

    def test_weight_argument(self):
        G = nx.MultiDiGraph()
        G.add_edge(1, 2, weight=1.41)
        G.add_edge(2, 1, weight=1.41)
        G.add_edge(2, 3)
        G.add_edge(3, 4, weight=3.14)
        self._check_communities(
            G, {frozenset({1, 2}), frozenset({3, 4})}, weight="weight"
        )

    def test_seed_argument(self):
        G = nx.karate_club_graph()
        C = nx.community.fast_label_propagation_communities(G, seed=2023)
        self._check_communities(G, {frozenset(c) for c in C}, seed=2023)
        # smoke test that seed=None works
        nx.community.fast_label_propagation_communities(G, seed=None)
|
venv/lib/python3.10/site-packages/networkx/algorithms/community/tests/test_louvain.py
ADDED
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
|
6 |
+
def test_modularity_increase():
    """Louvain should beat the singleton partition's modularity."""
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    singleton_mod = nx.community.modularity(G, [{u} for u in G.nodes()])
    partition = nx.community.louvain_communities(G)
    assert nx.community.modularity(G, partition) > singleton_mod
|
15 |
+
|
16 |
+
|
17 |
+
def test_valid_partition():
    """Louvain output must be a valid partition of the node set."""
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    H = G.to_directed()
    assert nx.community.is_partition(G, nx.community.louvain_communities(G))
    assert nx.community.is_partition(H, nx.community.louvain_communities(H))
|
27 |
+
|
28 |
+
|
29 |
+
def test_karate_club_partition():
    """A fixed seed reproduces a known karate-club partition."""
    G = nx.karate_club_graph()
    expected = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    assert expected == nx.community.louvain_communities(G, seed=2, weight=None)
|
40 |
+
|
41 |
+
|
42 |
+
def test_partition_iterator():
    """gh-5901: yielded partitions must not mutate once the next level is taken."""
    G = nx.path_graph(15)
    parts_iter = nx.community.louvain_partitions(G, seed=42)
    first_part = next(parts_iter)
    first_copy = [s.copy() for s in first_part]
    assert first_copy[0] == first_part[0]
    second_part = next(parts_iter)  # advancing must not change first_part
    assert first_copy[0] == first_part[0]
|
52 |
+
|
53 |
+
|
54 |
+
def test_undirected_selfloops():
    """Self-loop weights influence the partition only when large enough."""
    G = nx.karate_club_graph()
    expected_partition = nx.community.louvain_communities(G, seed=2, weight=None)
    part = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    assert expected_partition == part

    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(9)])
    # large self-loop weight impacts partition
    assert part != nx.community.louvain_communities(G, seed=2, weight="weight")
    # small self-loop weights aren't enough to impact partition in this graph
    assert part == nx.community.louvain_communities(G, seed=2, weight=None)
|
73 |
+
|
74 |
+
|
75 |
+
def test_directed_selfloops():
    """Directed analogue of the self-loop weight test."""
    G = nx.DiGraph()
    G.add_nodes_from(range(11))
    G.add_edges_from(
        [(0, 2), (0, 1), (1, 0), (2, 1), (2, 0), (3, 4), (4, 3),
         (7, 8), (8, 7), (9, 10), (10, 9)]
    )
    G_expected_partition = nx.community.louvain_communities(G, seed=123, weight=None)

    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(3)])
    # large self-loop weight impacts partition
    G_partition = nx.community.louvain_communities(G, seed=123, weight="weight")
    assert G_partition != G_expected_partition

    # small self-loop weights aren't enough to impact partition in this graph
    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)
    assert G_partition == G_expected_partition
|
102 |
+
|
103 |
+
|
104 |
+
def test_directed_partition():
    """
    Test 2 cases that were looping infinitely
    from issues #5175 and #5704
    """
    G = nx.DiGraph()
    G.add_nodes_from(range(10))
    G.add_edges_from(
        [(0, 2), (0, 1), (1, 0), (2, 1), (2, 0), (3, 4), (4, 3),
         (7, 8), (8, 7), (9, 10), (10, 9)]
    )
    H = nx.DiGraph()
    H.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    H.add_edges_from(
        [(1, 2), (1, 6), (1, 9), (2, 3), (2, 4), (2, 5), (3, 4), (4, 3),
         (4, 5), (5, 4), (6, 7), (6, 8), (9, 10), (9, 11), (10, 11), (11, 10)]
    )

    assert nx.community.louvain_communities(G, seed=123, weight=None) == [
        {0, 1, 2}, {3, 4}, {5}, {6}, {8, 7}, {9, 10}
    ]
    assert nx.community.louvain_communities(H, seed=123, weight=None) == [
        {2, 3, 4, 5}, {8, 1, 6, 7}, {9, 10, 11}
    ]
|
155 |
+
|
156 |
+
|
157 |
+
def test_none_weight_param():
    """weight=None ignores edge attributes; named weights change the result."""
    G = nx.karate_club_graph()
    nx.set_edge_attributes(
        G, {edge: i * i for i, edge in enumerate(G.edges)}, name="foo"
    )
    part = [
        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
        {16, 4, 5, 6, 10},
        {23, 25, 27, 28, 24, 31},
        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
    ]
    partition1 = nx.community.louvain_communities(G, weight=None, seed=2)
    partition2 = nx.community.louvain_communities(G, weight="foo", seed=2)
    partition3 = nx.community.louvain_communities(G, weight="weight", seed=2)

    assert part == partition1
    assert part != partition2
    assert part != partition3
    assert partition2 != partition3
|
177 |
+
|
178 |
+
|
179 |
+
def test_quality():
    """Louvain reaches coverage >= 0.65 on several graph flavors.

    Exercises an undirected LFR benchmark graph, a directed ``gn_graph``,
    and multigraph copies of each; ``partition_quality(...)[0]`` is the
    coverage score of the returned partition.
    """
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    H = nx.gn_graph(200, seed=1234)
    # Renamed from I/J: the single-letter name ``I`` is ambiguous (flake8 E741).
    MG = nx.MultiGraph(G)
    MDG = nx.MultiDiGraph(H)

    # One compute-and-check pass per graph flavor instead of four
    # copy-pasted stanzas.
    for graph in (G, H, MG, MDG):
        partition = nx.community.louvain_communities(graph)
        coverage = nx.community.partition_quality(graph, partition)[0]
        assert coverage >= 0.65
|
201 |
+
|
202 |
+
|
203 |
+
def test_multigraph():
    """A MultiGraph whose parallel edges sum to the Graph's weights partitions identically.

    On the simple graph ``add_edge(0, 1, weight=10)`` overwrites the existing
    edge; on the multigraph a parallel edge of weight 9 is added on top of the
    original weight-1 edge, so total edge weight agrees between the two.
    Using the ``"foo"`` attribute instead must change the result.
    """
    simple = nx.karate_club_graph()
    multi = nx.MultiGraph(simple)
    simple.add_edge(0, 1, weight=10)
    multi.add_edge(0, 1, weight=9)
    simple.add_edge(0, 9, foo=20)
    multi.add_edge(0, 9, foo=20)

    from_simple = nx.community.louvain_communities(simple, seed=1234)
    from_multi = nx.community.louvain_communities(multi, seed=1234)
    from_multi_foo = nx.community.louvain_communities(multi, weight="foo", seed=1234)

    assert from_simple == from_multi
    assert from_multi != from_multi_foo
|
216 |
+
|
217 |
+
|
218 |
+
def test_resolution():
    """Higher resolution produces at least as many (smaller) communities."""
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )

    # Same seed, three resolutions: 0.5 (coarse), default, 2 (fine).
    coarse = nx.community.louvain_communities(G, resolution=0.5, seed=12)
    default = nx.community.louvain_communities(G, seed=12)
    fine = nx.community.louvain_communities(G, resolution=2, seed=12)

    assert len(coarse) <= len(default) <= len(fine)
|
228 |
+
|
229 |
+
|
230 |
+
def test_threshold():
    """A loose convergence threshold stops early, so modularity cannot beat the default run."""
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    early_stop = nx.community.louvain_communities(G, threshold=0.3, seed=2)
    full_run = nx.community.louvain_communities(G, seed=2)

    early_mod = nx.community.modularity(G, early_stop)
    full_mod = nx.community.modularity(G, full_run)
    assert early_mod <= full_mod
|
240 |
+
|
241 |
+
|
242 |
+
def test_empty_graph():
    """An edgeless graph partitions into one singleton community per node."""
    G = nx.Graph()
    G.add_nodes_from(range(5))
    singletons = [{node} for node in range(5)]
    assert nx.community.louvain_communities(G) == singletons
|
247 |
+
|
248 |
+
|
249 |
+
def test_max_level():
    """``max_level=k`` reproduces the k-th intermediate Louvain partition.

    Also checks that a limit beyond the number of levels is harmless and
    that ``max_level=0`` raises ``ValueError``.
    """
    G = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
    )
    # Materialize every intermediate partition once, then compare each
    # level-limited run against it.
    levels = list(nx.community.louvain_partitions(G, seed=42))
    for depth, expected in enumerate(levels, 1):
        limited = nx.community.louvain_communities(G, max_level=depth, seed=42)
        assert limited == expected
    assert len(levels) > 1  # Ensure we are actually testing max_level
    # max_level is an upper limit; it's okay if we stop before it's hit.
    beyond = nx.community.louvain_communities(G, max_level=len(levels) + 1, seed=42)
    assert beyond == levels[-1]
    with pytest.raises(
        ValueError, match="max_level argument must be a positive integer"
    ):
        nx.community.louvain_communities(G, max_level=0)
|