Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- ckpts/universal/global_step80/zero/15.input_layernorm.weight/fp32.pt +3 -0
- ckpts/universal/global_step80/zero/24.mlp.dense_h_to_4h.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step80/zero/24.mlp.dense_h_to_4h.weight/fp32.pt +3 -0
- ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step80/zero/5.attention.query_key_value.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step80/zero/5.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__init__.py +5 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/connectivity.py +122 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/correlation.py +302 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/mixing.py +254 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/neighbor_degree.py +160 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/pairs.py +118 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__init__.py +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/base_test.py +81 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py +143 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py +123 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py +176 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py +108 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py +87 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/__init__.py +20 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness_subset.py +274 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/closeness.py +281 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py +341 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_closeness.py +95 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/degree_alg.py +149 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/group.py +786 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/katz.py +330 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/second_order.py +141 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/subgraph_alg.py +339 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/__init__.py +0 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py +197 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py +73 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_group.py +278 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py +221 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_reaching.py +117 -0
- venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py +82 -0
ckpts/universal/global_step80/zero/15.input_layernorm.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a78742781f162ff89bf7d1fae2fcf225c794c423b9613c852ec8384ca8b46871
+size 9293
ckpts/universal/global_step80/zero/24.mlp.dense_h_to_4h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0675b13c77c2f6db2c239e8e09d173d8c8b8bc8793dd23f5be569617b2763d64
+size 33555627
ckpts/universal/global_step80/zero/24.mlp.dense_h_to_4h.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b343ff4efa93c020821c466aa5f4e83dbe206cfff7c7d1424935196c68aeb1ff
+size 33555533
ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1489f8bf237e2f571ccf5baab7f629ea3fd3b0f17a19fa10f72315026af7e906
+size 33555612
ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e133114747f59f6cb148f4ed5bda17900cc4b97e1636d44f1e1a8e6d808649e
+size 33555627
ckpts/universal/global_step80/zero/5.attention.query_key_value.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eea12b6da32f05e353108a0fb7feabe5f22855e56e57fd51c1a21379e1727d5b
+size 50332828
ckpts/universal/global_step80/zero/5.attention.query_key_value.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a87f789664eb4f9d87f13318194365f6a888e195752c96d9520a23c67564dd0
+size 50332843
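
The seven checkpoint entries above are Git LFS pointer files rather than the tensors themselves: each records the pointer spec version, the SHA-256 of the stored object, and its size in bytes. As a minimal illustration (the helper below is hypothetical and not part of this commit), such a pointer can be parsed with a few lines of Python:

# Hypothetical helper, not part of this repository: parse a Git LFS pointer
# file into a dict with the keys seen above ("version", "oid", "size").
def read_lfs_pointer(path):
    fields = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

# read_lfs_pointer("ckpts/universal/global_step80/zero/15.input_layernorm.weight/fp32.pt")
# -> {'version': 'https://git-lfs.github.com/spec/v1', 'oid': 'sha256:a787...', 'size': '9293'}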
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__init__.py
ADDED
@@ -0,0 +1,5 @@
+from networkx.algorithms.assortativity.connectivity import *
+from networkx.algorithms.assortativity.correlation import *
+from networkx.algorithms.assortativity.mixing import *
+from networkx.algorithms.assortativity.neighbor_degree import *
+from networkx.algorithms.assortativity.pairs import *
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (485 Bytes)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-310.pyc
ADDED
Binary file (4.03 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-310.pyc
ADDED
Binary file (9.28 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-310.pyc
ADDED
Binary file (7.61 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-310.pyc
ADDED
Binary file (4.71 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-310.pyc
ADDED
Binary file (3.46 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/connectivity.py
ADDED
@@ -0,0 +1,122 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["average_degree_connectivity"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def average_degree_connectivity(
+    G, source="in+out", target="in+out", nodes=None, weight=None
+):
+    r"""Compute the average degree connectivity of graph.
+
+    The average degree connectivity is the average nearest neighbor degree of
+    nodes with degree k. For weighted graphs, an analogous measure can
+    be computed using the weighted average neighbors degree defined in
+    [1]_, for a node `i`, as
+
+    .. math::
+
+        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
+
+    where `s_i` is the weighted degree of node `i`,
+    `w_{ij}` is the weight of the edge that links `i` and `j`,
+    and `N(i)` are the neighbors of node `i`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : "in"|"out"|"in+out" (default:"in+out")
+        Directed graphs only. Use "in"- or "out"-degree for source node.
+
+    target : "in"|"out"|"in+out" (default:"in+out"
+        Directed graphs only. Use "in"- or "out"-degree for target node.
+
+    nodes : list or iterable (optional)
+        Compute neighbor connectivity for these nodes. The default is all
+        nodes.
+
+    weight : string or None, optional (default=None)
+        The edge attribute that holds the numerical value used as a weight.
+        If None, then each edge has weight 1.
+
+    Returns
+    -------
+    d : dict
+        A dictionary keyed by degree k with the value of average connectivity.
+
+    Raises
+    ------
+    NetworkXError
+        If either `source` or `target` are not one of 'in',
+        'out', or 'in+out'.
+        If either `source` or `target` is passed for an undirected graph.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> G.edges[1, 2]["weight"] = 3
+    >>> nx.average_degree_connectivity(G)
+    {1: 2.0, 2: 1.5}
+    >>> nx.average_degree_connectivity(G, weight="weight")
+    {1: 2.0, 2: 1.75}
+
+    See Also
+    --------
+    average_neighbor_degree
+
+    References
+    ----------
+    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
+       "The architecture of complex weighted networks".
+       PNAS 101 (11): 3747–3752 (2004).
+    """
+    # First, determine the type of neighbors and the type of degree to use.
+    if G.is_directed():
+        if source not in ("in", "out", "in+out"):
+            raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
+        if target not in ("in", "out", "in+out"):
+            raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
+        direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
+        neighbor_funcs = {
+            "out": G.successors,
+            "in": G.predecessors,
+            "in+out": G.neighbors,
+        }
+        source_degree = direction[source]
+        target_degree = direction[target]
+        neighbors = neighbor_funcs[source]
+        # `reverse` indicates whether to look at the in-edge when
+        # computing the weight of an edge.
+        reverse = source == "in"
+    else:
+        if source != "in+out" or target != "in+out":
+            raise nx.NetworkXError(
+                f"source and target arguments are only supported for directed graphs"
+            )
+        source_degree = G.degree
+        target_degree = G.degree
+        neighbors = G.neighbors
+        reverse = False
+    dsum = defaultdict(int)
+    dnorm = defaultdict(int)
+    # Check if `source_nodes` is actually a single node in the graph.
+    source_nodes = source_degree(nodes)
+    if nodes in G:
+        source_nodes = [(nodes, source_degree(nodes))]
+    for n, k in source_nodes:
+        nbrdeg = target_degree(neighbors(n))
+        if weight is None:
+            s = sum(d for n, d in nbrdeg)
+        else:  # weight nbr degree by weight of (n,nbr) edge
+            if reverse:
+                s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg)
+            else:
+                s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)
+        dnorm[k] += source_degree(n, weight=weight)
+        dsum[k] += s
+
+    # normalize
+    return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()}
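
As a quick sanity check of average_degree_connectivity beyond the doctest above, the directed-graph behaviour can be exercised as follows; the expected values mirror test_degree_p4 in the test file added later in this diff:

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> nx.average_degree_connectivity(G)
{1: 2.0, 2: 1.5}
>>> D = G.to_directed()
>>> nx.average_degree_connectivity(D)  # in+out degree on both ends
{2: 2.0, 4: 1.5}
>>> nx.average_degree_connectivity(D, source="in", target="in")
{1: 2.0, 2: 1.5}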
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/correlation.py
ADDED
@@ -0,0 +1,302 @@
+"""Node assortativity coefficients and correlation measures.
+"""
+import networkx as nx
+from networkx.algorithms.assortativity.mixing import (
+    attribute_mixing_matrix,
+    degree_mixing_matrix,
+)
+from networkx.algorithms.assortativity.pairs import node_degree_xy
+
+__all__ = [
+    "degree_pearson_correlation_coefficient",
+    "degree_assortativity_coefficient",
+    "attribute_assortativity_coefficient",
+    "numeric_assortativity_coefficient",
+]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None):
+    """Compute degree assortativity of graph.
+
+    Assortativity measures the similarity of connections
+    in the graph with respect to the node degree.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    nodes: list or iterable (optional)
+        Compute degree assortativity only for nodes in container.
+        The default is all nodes.
+
+    Returns
+    -------
+    r : float
+       Assortativity of graph by degree.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> r = nx.degree_assortativity_coefficient(G)
+    >>> print(f"{r:3.1f}")
+    -0.5
+
+    See Also
+    --------
+    attribute_assortativity_coefficient
+    numeric_assortativity_coefficient
+    degree_mixing_dict
+    degree_mixing_matrix
+
+    Notes
+    -----
+    This computes Eq. (21) in Ref. [1]_ , where e is the joint
+    probability distribution (mixing matrix) of the degrees. If G is
+    directed than the matrix e is the joint probability of the
+    user-specified degree type for the source and target.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
+       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
+    """
+    if nodes is None:
+        nodes = G.nodes
+
+    degrees = None
+
+    if G.is_directed():
+        indeg = (
+            {d for _, d in G.in_degree(nodes, weight=weight)}
+            if "in" in (x, y)
+            else set()
+        )
+        outdeg = (
+            {d for _, d in G.out_degree(nodes, weight=weight)}
+            if "out" in (x, y)
+            else set()
+        )
+        degrees = set.union(indeg, outdeg)
+    else:
+        degrees = {d for _, d in G.degree(nodes, weight=weight)}
+
+    mapping = {d: i for i, d in enumerate(degrees)}
+    M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping)
+
+    return _numeric_ac(M, mapping=mapping)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None):
+    """Compute degree assortativity of graph.
+
+    Assortativity measures the similarity of connections
+    in the graph with respect to the node degree.
+
+    This is the same as degree_assortativity_coefficient but uses the
+    potentially faster scipy.stats.pearsonr function.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    nodes: list or iterable (optional)
+        Compute pearson correlation of degrees only for specified nodes.
+        The default is all nodes.
+
+    Returns
+    -------
+    r : float
+       Assortativity of graph by degree.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> r = nx.degree_pearson_correlation_coefficient(G)
+    >>> print(f"{r:3.1f}")
+    -0.5
+
+    Notes
+    -----
+    This calls scipy.stats.pearsonr.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks
+           Physical Review E, 67 026126, 2003
+    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
+       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
+    """
+    import scipy as sp
+
+    xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
+    x, y = zip(*xy)
+    return float(sp.stats.pearsonr(x, y)[0])
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_assortativity_coefficient(G, attribute, nodes=None):
+    """Compute assortativity for node attributes.
+
+    Assortativity measures the similarity of connections
+    in the graph with respect to the given attribute.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    attribute : string
+        Node attribute key
+
+    nodes: list or iterable (optional)
+        Compute attribute assortativity for nodes in container.
+        The default is all nodes.
+
+    Returns
+    -------
+    r: float
+       Assortativity of graph for given attribute
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], color="red")
+    >>> G.add_nodes_from([2, 3], color="blue")
+    >>> G.add_edges_from([(0, 1), (2, 3)])
+    >>> print(nx.attribute_assortativity_coefficient(G, "color"))
+    1.0
+
+    Notes
+    -----
+    This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)),
+    where M is the joint probability distribution (mixing matrix)
+    of the specified attribute.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    """
+    M = attribute_mixing_matrix(G, attribute, nodes)
+    return attribute_ac(M)
+
+
+@nx._dispatchable(node_attrs="attribute")
+def numeric_assortativity_coefficient(G, attribute, nodes=None):
+    """Compute assortativity for numerical node attributes.
+
+    Assortativity measures the similarity of connections
+    in the graph with respect to the given numeric attribute.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    attribute : string
+        Node attribute key.
+
+    nodes: list or iterable (optional)
+        Compute numeric assortativity only for attributes of nodes in
+        container. The default is all nodes.
+
+    Returns
+    -------
+    r: float
+       Assortativity of graph for given attribute
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], size=2)
+    >>> G.add_nodes_from([2, 3], size=3)
+    >>> G.add_edges_from([(0, 1), (2, 3)])
+    >>> print(nx.numeric_assortativity_coefficient(G, "size"))
+    1.0
+
+    Notes
+    -----
+    This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
+    coefficient of the specified (scalar valued) attribute across edges.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks
+           Physical Review E, 67 026126, 2003
+    """
+    if nodes is None:
+        nodes = G.nodes
+    vals = {G.nodes[n][attribute] for n in nodes}
+    mapping = {d: i for i, d in enumerate(vals)}
+    M = attribute_mixing_matrix(G, attribute, nodes, mapping)
+    return _numeric_ac(M, mapping)
+
+
+def attribute_ac(M):
+    """Compute assortativity for attribute matrix M.
+
+    Parameters
+    ----------
+    M : numpy.ndarray
+        2D ndarray representing the attribute mixing matrix.
+
+    Notes
+    -----
+    This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
+    where e is the joint probability distribution (mixing matrix)
+    of the specified attribute.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    """
+    if M.sum() != 1.0:
+        M = M / M.sum()
+    s = (M @ M).sum()
+    t = M.trace()
+    r = (t - s) / (1 - s)
+    return float(r)
+
+
+def _numeric_ac(M, mapping):
+    # M is a 2D numpy array
+    # numeric assortativity coefficient, pearsonr
+    import numpy as np
+
+    if M.sum() != 1.0:
+        M = M / M.sum()
+    x = np.array(list(mapping.keys()))
+    y = x  # x and y have the same support
+    idx = list(mapping.values())
+    a = M.sum(axis=0)
+    b = M.sum(axis=1)
+    vara = (a[idx] * x**2).sum() - ((a[idx] * x).sum()) ** 2
+    varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2
+    xy = np.outer(x, y)
+    ab = np.outer(a[idx], b[idx])
+    return float((xy * (M - ab)).sum() / np.sqrt(vara * varb))
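
A brief sketch of how the coefficients defined above relate; the graphs and expected values are taken directly from the module's doctests:

>>> import networkx as nx
>>> r = nx.degree_assortativity_coefficient(nx.path_graph(4))
>>> print(f"{r:3.1f}")
-0.5
>>> G = nx.Graph()
>>> G.add_nodes_from([0, 1], color="red")
>>> G.add_nodes_from([2, 3], color="blue")
>>> G.add_edges_from([(0, 1), (2, 3)])
>>> print(nx.attribute_assortativity_coefficient(G, "color"))
1.0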
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/mixing.py
ADDED
@@ -0,0 +1,254 @@
+"""
+Mixing matrices for node attributes and degree.
+"""
+import networkx as nx
+from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
+from networkx.utils import dict_to_numpy_array
+
+__all__ = [
+    "attribute_mixing_matrix",
+    "attribute_mixing_dict",
+    "degree_mixing_matrix",
+    "degree_mixing_dict",
+    "mixing_dict",
+]
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+       Unse nodes in container to build the dict. The default is all nodes.
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], color="red")
+    >>> G.add_nodes_from([2, 3], color="blue")
+    >>> G.add_edge(1, 3)
+    >>> d = nx.attribute_mixing_dict(G, "color")
+    >>> print(d["red"]["blue"])
+    1
+    >>> print(d["blue"]["red"])  # d symmetric for undirected graphs
+    1
+
+    Returns
+    -------
+    d : dictionary
+       Counts or joint probability of occurrence of attribute pairs.
+    """
+    xy_iter = node_attribute_xy(G, attribute, nodes)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
+    """Returns mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+        Use only nodes in container to build the matrix. The default is
+        all nodes.
+
+    mapping : dictionary, optional
+       Mapping from node attribute to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    m: numpy array
+       Counts or joint probability of occurrence of attribute pairs.
+
+    Notes
+    -----
+    If each node has a unique attribute value, the unnormalized mixing matrix
+    will be equal to the adjacency matrix. To get a denser mixing matrix,
+    the rounding can be performed to form groups of nodes with equal values.
+    For example, the exact height of persons in cm (180.79155222, 163.9080892,
+    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
+    163, 168, 168, ...).
+
+    Definitions of attribute mixing matrix vary on whether the matrix
+    should include rows for attribute values that don't arise. Here we
+    do not include such empty-rows. But you can force them to appear
+    by inputting a `mapping` that includes those values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> gender = {0: "male", 1: "female", 2: "female"}
+    >>> nx.set_node_attributes(G, gender, "gender")
+    >>> mapping = {"male": 0, "female": 1}
+    >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
+    >>> mix_mat
+    array([[0.  , 0.25],
+           [0.25, 0.5 ]])
+    """
+    d = attribute_mixing_dict(G, attribute, nodes)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for degree.
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=False)
+        Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or joint probability of occurrence of degree pairs.
+    """
+    xy_iter = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_matrix(
+    G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
+):
+    """Returns mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    nodes: list or iterable (optional)
+        Build the matrix using only nodes in container.
+        The default is all nodes.
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    mapping : dictionary, optional
+       Mapping from node degree to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    Returns
+    -------
+    m: numpy array
+       Counts, or joint probability, of occurrence of node degree.
+
+    Notes
+    -----
+    Definitions of degree mixing matrix vary on whether the matrix
+    should include rows for degree values that don't arise. Here we
+    do not include such empty-rows. But you can force them to appear
+    by inputting a `mapping` that includes those values. See examples.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(3)
+    >>> mix_mat = nx.degree_mixing_matrix(G)
+    >>> mix_mat
+    array([[0. , 0.5],
+           [0.5, 0. ]])
+
+    If you want every possible degree to appear as a row, even if no nodes
+    have that degree, use `mapping` as follows,
+
+    >>> max_degree = max(deg for n, deg in G.degree)
+    >>> mapping = {x: x for x in range(max_degree + 1)}  # identity mapping
+    >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
+    >>> mix_mat
+    array([[0. , 0. , 0. , 0. ],
+           [0. , 0. , 0. , 0.5],
+           [0. , 0. , 0. , 0. ],
+           [0. , 0.5, 0. , 0. ]])
+    """
+    d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+def mixing_dict(xy, normalized=False):
+    """Returns a dictionary representation of mixing matrix.
+
+    Parameters
+    ----------
+    xy : list or container of two-tuples
+       Pairs of (x,y) items.
+
+    attribute : string
+       Node attribute key
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or Joint probability of occurrence of values in xy.
+    """
+    d = {}
+    psum = 0.0
+    for x, y in xy:
+        if x not in d:
+            d[x] = {}
+        if y not in d:
+            d[y] = {}
+        v = d[x].get(y, 0)
+        d[x][y] = v + 1
+        psum += 1
+
+    if normalized:
+        for _, jdict in d.items():
+            for j in jdict:
+                jdict[j] /= psum
+    return d
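
A small usage sketch tying the mixing helpers together, reusing the `gender` example from the attribute_mixing_matrix docstring above:

>>> import networkx as nx
>>> G = nx.path_graph(3)
>>> nx.set_node_attributes(G, {0: "male", 1: "female", 2: "female"}, "gender")
>>> nx.attribute_mixing_dict(G, "gender")["male"]["female"]
1
>>> nx.attribute_mixing_matrix(G, "gender", mapping={"male": 0, "female": 1})
array([[0.  , 0.25],
       [0.25, 0.5 ]])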
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/neighbor_degree.py
ADDED
@@ -0,0 +1,160 @@
+import networkx as nx
+
+__all__ = ["average_neighbor_degree"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None):
+    r"""Returns the average degree of the neighborhood of each node.
+
+    In an undirected graph, the neighborhood `N(i)` of node `i` contains the
+    nodes that are connected to `i` by an edge.
+
+    For directed graphs, `N(i)` is defined according to the parameter `source`:
+
+        - if source is 'in', then `N(i)` consists of predecessors of node `i`.
+        - if source is 'out', then `N(i)` consists of successors of node `i`.
+        - if source is 'in+out', then `N(i)` is both predecessors and successors.
+
+    The average neighborhood degree of a node `i` is
+
+    .. math::
+
+        k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j
+
+    where `N(i)` are the neighbors of node `i` and `k_j` is
+    the degree of node `j` which belongs to `N(i)`. For weighted
+    graphs, an analogous measure can be defined [1]_,
+
+    .. math::
+
+        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
+
+    where `s_i` is the weighted degree of node `i`, `w_{ij}`
+    is the weight of the edge that links `i` and `j` and
+    `N(i)` are the neighbors of node `i`.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : string ("in"|"out"|"in+out"), optional (default="out")
+       Directed graphs only.
+       Use "in"- or "out"-neighbors of source node.
+
+    target : string ("in"|"out"|"in+out"), optional (default="out")
+       Directed graphs only.
+       Use "in"- or "out"-degree for target node.
+
+    nodes : list or iterable, optional (default=G.nodes)
+        Compute neighbor degree only for specified nodes.
+
+    weight : string or None, optional (default=None)
+       The edge attribute that holds the numerical value used as a weight.
+       If None, then each edge has weight 1.
+
+    Returns
+    -------
+    d: dict
+       A dictionary keyed by node to the average degree of its neighbors.
+
+    Raises
+    ------
+    NetworkXError
+        If either `source` or `target` are not one of 'in', 'out', or 'in+out'.
+        If either `source` or `target` is passed for an undirected graph.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> G.edges[0, 1]["weight"] = 5
+    >>> G.edges[2, 3]["weight"] = 3
+
+    >>> nx.average_neighbor_degree(G)
+    {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
+    >>> nx.average_neighbor_degree(G, weight="weight")
+    {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0}
+
+    >>> G = nx.DiGraph()
+    >>> nx.add_path(G, [0, 1, 2, 3])
+    >>> nx.average_neighbor_degree(G, source="in", target="in")
+    {0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0}
+
+    >>> nx.average_neighbor_degree(G, source="out", target="out")
+    {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}
+
+    See Also
+    --------
+    average_degree_connectivity
+
+    References
+    ----------
+    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
+       "The architecture of complex weighted networks".
+       PNAS 101 (11): 3747–3752 (2004).
+    """
+    if G.is_directed():
+        if source == "in":
+            source_degree = G.in_degree
+        elif source == "out":
+            source_degree = G.out_degree
+        elif source == "in+out":
+            source_degree = G.degree
+        else:
+            raise nx.NetworkXError(
+                f"source argument {source} must be 'in', 'out' or 'in+out'"
+            )
+
+        if target == "in":
+            target_degree = G.in_degree
+        elif target == "out":
+            target_degree = G.out_degree
+        elif target == "in+out":
+            target_degree = G.degree
+        else:
+            raise nx.NetworkXError(
+                f"target argument {target} must be 'in', 'out' or 'in+out'"
+            )
+    else:
+        if source != "out" or target != "out":
+            raise nx.NetworkXError(
+                f"source and target arguments are only supported for directed graphs"
+            )
+        source_degree = target_degree = G.degree
+
+    # precompute target degrees -- should *not* be weighted degree
+    t_deg = dict(target_degree())
+
+    # Set up both predecessor and successor neighbor dicts leaving empty if not needed
+    G_P = G_S = {n: {} for n in G}
+    if G.is_directed():
+        # "in" or "in+out" cases: G_P contains predecessors
+        if "in" in source:
+            G_P = G.pred
+        # "out" or "in+out" cases: G_S contains successors
+        if "out" in source:
+            G_S = G.succ
+    else:
+        # undirected leave G_P empty but G_S is the adjacency
+        G_S = G.adj
+
+    # Main loop: Compute average degree of neighbors
+    avg = {}
+    for n, deg in source_degree(nodes, weight=weight):
+        # handle degree zero average
+        if deg == 0:
+            avg[n] = 0.0
+            continue
+
+        # we sum over both G_P and G_S, but one of the two is usually empty.
+        if weight is None:
+            avg[n] = (
+                sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n])
+            ) / deg
+        else:
+            avg[n] = (
+                sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items())
+                + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items())
+            ) / deg
+    return avg
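
A short sketch of average_neighbor_degree; the first call mirrors the docstring above, and the error case follows from the undirected branch of the code:

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> nx.average_neighbor_degree(G)
{0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
>>> nx.average_neighbor_degree(G, source="in")  # undirected graphs reject source/target
Traceback (most recent call last):
    ...
networkx.exception.NetworkXError: source and target arguments are only supported for directed graphs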
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/pairs.py
ADDED
@@ -0,0 +1,118 @@
+"""Generators of x-y pairs of node data."""
+import networkx as nx
+
+__all__ = ["node_attribute_xy", "node_degree_xy"]
+
+
+@nx._dispatchable(node_attrs="attribute")
+def node_attribute_xy(G, attribute, nodes=None):
+    """Returns iterator of node-attribute pairs for all edges in G.
+
+    Parameters
+    ----------
+    G: NetworkX graph
+
+    attribute: key
+       The node attribute key.
+
+    nodes: list or iterable (optional)
+        Use only edges that are incident to specified nodes.
+        The default is all nodes.
+
+    Returns
+    -------
+    (x, y): 2-tuple
+        Generates 2-tuple of (attribute, attribute) values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_node(1, color="red")
+    >>> G.add_node(2, color="blue")
+    >>> G.add_edge(1, 2)
+    >>> list(nx.node_attribute_xy(G, "color"))
+    [('red', 'blue')]
+
+    Notes
+    -----
+    For undirected graphs each edge is produced twice, once for each edge
+    representation (u, v) and (v, u), with the exception of self-loop edges
+    which only appear once.
+    """
+    if nodes is None:
+        nodes = set(G)
+    else:
+        nodes = set(nodes)
+    Gnodes = G.nodes
+    for u, nbrsdict in G.adjacency():
+        if u not in nodes:
+            continue
+        uattr = Gnodes[u].get(attribute, None)
+        if G.is_multigraph():
+            for v, keys in nbrsdict.items():
+                vattr = Gnodes[v].get(attribute, None)
+                for _ in keys:
+                    yield (uattr, vattr)
+        else:
+            for v in nbrsdict:
+                vattr = Gnodes[v].get(attribute, None)
+                yield (uattr, vattr)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def node_degree_xy(G, x="out", y="in", weight=None, nodes=None):
+    """Generate node degree-degree pairs for edges in G.
+
+    Parameters
+    ----------
+    G: NetworkX graph
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    nodes: list or iterable (optional)
+        Use only edges that are adjacency to specified nodes.
+        The default is all nodes.
+
+    Returns
+    -------
+    (x, y): 2-tuple
+        Generates 2-tuple of (degree, degree) values.
+
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edge(1, 2)
+    >>> list(nx.node_degree_xy(G, x="out", y="in"))
+    [(1, 1)]
+    >>> list(nx.node_degree_xy(G, x="in", y="out"))
+    [(0, 0)]
+
+    Notes
+    -----
+    For undirected graphs each edge is produced twice, once for each edge
+    representation (u, v) and (v, u), with the exception of self-loop edges
+    which only appear once.
+    """
+    nodes = set(G) if nodes is None else set(nodes)
+    if G.is_directed():
+        direction = {"out": G.out_degree, "in": G.in_degree}
+        xdeg = direction[x]
+        ydeg = direction[y]
+    else:
+        xdeg = ydeg = G.degree
+
+    for u, degu in xdeg(nodes, weight=weight):
+        # use G.edges to treat multigraphs correctly
+        neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes)
+        for _, degv in ydeg(neighbors, weight=weight):
+            yield degu, degv
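
The pair generators above feed the correlation and mixing code earlier in this diff; a minimal sketch, with values mirroring the docstrings:

>>> import networkx as nx
>>> G = nx.DiGraph()
>>> G.add_node(1, color="red")
>>> G.add_node(2, color="blue")
>>> G.add_edge(1, 2)
>>> list(nx.node_attribute_xy(G, "color"))
[('red', 'blue')]
>>> list(nx.node_degree_xy(G, x="out", y="in"))
[(1, 1)]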
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__init__.py
ADDED
File without changes
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (207 Bytes)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-310.pyc
ADDED
Binary file (2.49 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-310.pyc
ADDED
Binary file (4.42 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-310.pyc
ADDED
Binary file (7.1 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-310.pyc
ADDED
Binary file (7.59 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-310.pyc
ADDED
Binary file (3.5 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-310.pyc
ADDED
Binary file (3.69 kB)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/base_test.py
ADDED
@@ -0,0 +1,81 @@
+import networkx as nx
+
+
+class BaseTestAttributeMixing:
+    @classmethod
+    def setup_class(cls):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1], fish="one")
+        G.add_nodes_from([2, 3], fish="two")
+        G.add_nodes_from([4], fish="red")
+        G.add_nodes_from([5], fish="blue")
+        G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.G = G
+
+        D = nx.DiGraph()
+        D.add_nodes_from([0, 1], fish="one")
+        D.add_nodes_from([2, 3], fish="two")
+        D.add_nodes_from([4], fish="red")
+        D.add_nodes_from([5], fish="blue")
+        D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.D = D
+
+        M = nx.MultiGraph()
+        M.add_nodes_from([0, 1], fish="one")
+        M.add_nodes_from([2, 3], fish="two")
+        M.add_nodes_from([4], fish="red")
+        M.add_nodes_from([5], fish="blue")
+        M.add_edges_from([(0, 1), (0, 1), (2, 3)])
+        cls.M = M
+
+        S = nx.Graph()
+        S.add_nodes_from([0, 1], fish="one")
+        S.add_nodes_from([2, 3], fish="two")
+        S.add_nodes_from([4], fish="red")
+        S.add_nodes_from([5], fish="blue")
+        S.add_edge(0, 0)
+        S.add_edge(2, 2)
+        cls.S = S
+
+        N = nx.Graph()
+        N.add_nodes_from([0, 1], margin=-2)
+        N.add_nodes_from([2, 3], margin=-2)
+        N.add_nodes_from([4], margin=-3)
+        N.add_nodes_from([5], margin=-4)
+        N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.N = N
+
+        F = nx.Graph()
+        F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
+        F.add_edge(0, 2, weight=1)
+        nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
+        cls.F = F
+
+        K = nx.Graph()
+        K.add_nodes_from([1, 2], margin=-1)
+        K.add_nodes_from([3], margin=1)
+        K.add_nodes_from([4], margin=2)
+        K.add_edges_from([(3, 4), (1, 2), (1, 3)])
+        cls.K = K
+
+
+class BaseTestDegreeMixing:
+    @classmethod
+    def setup_class(cls):
+        cls.P4 = nx.path_graph(4)
+        cls.D = nx.DiGraph()
+        cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
+        cls.D2 = nx.DiGraph()
+        cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)])
+        cls.M = nx.MultiGraph()
+        nx.add_path(cls.M, range(4))
+        cls.M.add_edge(0, 1)
+        cls.S = nx.Graph()
+        cls.S.add_edges_from([(0, 0), (1, 1)])
+        cls.W = nx.Graph()
+        cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
+        cls.W.add_edge(0, 2, weight=1)
+        S1 = nx.star_graph(4)
+        S2 = nx.star_graph(4)
+        cls.DS = nx.disjoint_union(S1, S2)
+        cls.DS.add_edge(4, 5)
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py
ADDED
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from itertools import permutations
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
import networkx as nx
|
6 |
+
|
7 |
+
|
8 |
+
class TestNeighborConnectivity:
|
9 |
+
def test_degree_p4(self):
|
10 |
+
G = nx.path_graph(4)
|
11 |
+
answer = {1: 2.0, 2: 1.5}
|
12 |
+
nd = nx.average_degree_connectivity(G)
|
13 |
+
assert nd == answer
|
14 |
+
|
15 |
+
D = G.to_directed()
|
16 |
+
answer = {2: 2.0, 4: 1.5}
|
17 |
+
nd = nx.average_degree_connectivity(D)
|
18 |
+
assert nd == answer
|
19 |
+
|
20 |
+
answer = {1: 2.0, 2: 1.5}
|
21 |
+
D = G.to_directed()
|
22 |
+
nd = nx.average_degree_connectivity(D, source="in", target="in")
|
23 |
+
assert nd == answer
|
24 |
+
|
25 |
+
D = G.to_directed()
|
26 |
+
nd = nx.average_degree_connectivity(D, source="in", target="in")
|
27 |
+
assert nd == answer
|
28 |
+
|
29 |
+
def test_degree_p4_weighted(self):
|
30 |
+
G = nx.path_graph(4)
|
31 |
+
G[1][2]["weight"] = 4
|
32 |
+
answer = {1: 2.0, 2: 1.8}
|
33 |
+
nd = nx.average_degree_connectivity(G, weight="weight")
|
34 |
+
assert nd == answer
|
35 |
+
answer = {1: 2.0, 2: 1.5}
|
36 |
+
nd = nx.average_degree_connectivity(G)
|
37 |
+
assert nd == answer
|
38 |
+
|
39 |
+
D = G.to_directed()
|
40 |
+
answer = {2: 2.0, 4: 1.8}
|
41 |
+
nd = nx.average_degree_connectivity(D, weight="weight")
|
42 |
+
assert nd == answer
|
43 |
+
|
44 |
+
answer = {1: 2.0, 2: 1.8}
|
45 |
+
D = G.to_directed()
|
46 |
+
nd = nx.average_degree_connectivity(
|
47 |
+
            D, weight="weight", source="in", target="in"
        )
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_degree_connectivity(
            D, source="in", target="out", weight="weight"
        )
        assert nd == answer

    def test_weight_keyword(self):
        G = nx.path_graph(4)
        G[1][2]["other"] = 4
        answer = {1: 2.0, 2: 1.8}
        nd = nx.average_degree_connectivity(G, weight="other")
        assert nd == answer
        answer = {1: 2.0, 2: 1.5}
        nd = nx.average_degree_connectivity(G, weight=None)
        assert nd == answer

        D = G.to_directed()
        answer = {2: 2.0, 4: 1.8}
        nd = nx.average_degree_connectivity(D, weight="other")
        assert nd == answer

        answer = {1: 2.0, 2: 1.8}
        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
        assert nd == answer

    def test_degree_barrat(self):
        G = nx.star_graph(5)
        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
        G[0][5]["weight"] = 5
        nd = nx.average_degree_connectivity(G)[5]
        assert nd == 1.8
        nd = nx.average_degree_connectivity(G, weight="weight")[5]
        assert nd == pytest.approx(3.222222, abs=1e-5)

    def test_zero_deg(self):
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(1, 3)
        G.add_edge(1, 4)
        c = nx.average_degree_connectivity(G)
        assert c == {1: 0, 3: 1}
        c = nx.average_degree_connectivity(G, source="in", target="in")
        assert c == {0: 0, 1: 0}
        c = nx.average_degree_connectivity(G, source="in", target="out")
        assert c == {0: 0, 1: 3}
        c = nx.average_degree_connectivity(G, source="in", target="in+out")
        assert c == {0: 0, 1: 3}
        c = nx.average_degree_connectivity(G, source="out", target="out")
        assert c == {0: 0, 3: 0}
        c = nx.average_degree_connectivity(G, source="out", target="in")
        assert c == {0: 0, 3: 1}
        c = nx.average_degree_connectivity(G, source="out", target="in+out")
        assert c == {0: 0, 3: 1}

    def test_in_out_weight(self):
        G = nx.DiGraph()
        G.add_edge(1, 2, weight=1)
        G.add_edge(1, 3, weight=1)
        G.add_edge(3, 1, weight=1)
        for s, t in permutations(["in", "out", "in+out"], 2):
            c = nx.average_degree_connectivity(G, source=s, target=t)
            cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight")
            assert c == cw

    def test_invalid_source(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.DiGraph()
            nx.average_degree_connectivity(G, source="bogus")

    def test_invalid_target(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.DiGraph()
            nx.average_degree_connectivity(G, target="bogus")

    def test_invalid_undirected_graph(self):
        G = nx.Graph()
        with pytest.raises(nx.NetworkXError):
            nx.average_degree_connectivity(G, target="bogus")
        with pytest.raises(nx.NetworkXError):
            nx.average_degree_connectivity(G, source="bogus")

    def test_single_node(self):
        # TODO Is this really the intended behavior for providing a
        # single node as the argument `nodes`? Shouldn't the function
        # just return the connectivity value itself?
        G = nx.trivial_graph()
        conn = nx.average_degree_connectivity(G, nodes=0)
        assert conn == {0: 0}
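A quick sanity check of the weight handling exercised by test_weight_keyword above; this snippet is illustrative only and is not part of the vendored test file:

import networkx as nx

G = nx.path_graph(4)
G[1][2]["other"] = 4
# values asserted in the test above
assert nx.average_degree_connectivity(G, weight="other") == {1: 2.0, 2: 1.8}
assert nx.average_degree_connectivity(G, weight=None) == {1: 2.0, 2: 1.5}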
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
ADDED
@@ -0,0 +1,123 @@
import pytest

np = pytest.importorskip("numpy")
pytest.importorskip("scipy")


import networkx as nx
from networkx.algorithms.assortativity.correlation import attribute_ac

from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing


class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
    def test_degree_assortativity_undirected(self):
        r = nx.degree_assortativity_coefficient(self.P4)
        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)

    def test_degree_assortativity_node_kwargs(self):
        G = nx.Graph()
        edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)]
        G.add_edges_from(edges)
        r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4])
        np.testing.assert_almost_equal(r, -1.0, decimal=4)

    def test_degree_assortativity_directed(self):
        r = nx.degree_assortativity_coefficient(self.D)
        np.testing.assert_almost_equal(r, -0.57735, decimal=4)

    def test_degree_assortativity_directed2(self):
        """Test degree assortativity for a directed graph where the set of
        in/out degree does not equal the total degree."""
        r = nx.degree_assortativity_coefficient(self.D2)
        np.testing.assert_almost_equal(r, 0.14852, decimal=4)

    def test_degree_assortativity_multigraph(self):
        r = nx.degree_assortativity_coefficient(self.M)
        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)

    def test_degree_pearson_assortativity_undirected(self):
        r = nx.degree_pearson_correlation_coefficient(self.P4)
        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)

    def test_degree_pearson_assortativity_directed(self):
        r = nx.degree_pearson_correlation_coefficient(self.D)
        np.testing.assert_almost_equal(r, -0.57735, decimal=4)

    def test_degree_pearson_assortativity_directed2(self):
        """Test degree assortativity with Pearson for a directed graph where
        the set of in/out degree does not equal the total degree."""
        r = nx.degree_pearson_correlation_coefficient(self.D2)
        np.testing.assert_almost_equal(r, 0.14852, decimal=4)

    def test_degree_pearson_assortativity_multigraph(self):
        r = nx.degree_pearson_correlation_coefficient(self.M)
        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)

    def test_degree_assortativity_weighted(self):
        r = nx.degree_assortativity_coefficient(self.W, weight="weight")
        np.testing.assert_almost_equal(r, -0.1429, decimal=4)

    def test_degree_assortativity_double_star(self):
        r = nx.degree_assortativity_coefficient(self.DS)
        np.testing.assert_almost_equal(r, -0.9339, decimal=4)


class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
    def test_attribute_assortativity_undirected(self):
        r = nx.attribute_assortativity_coefficient(self.G, "fish")
        assert r == 6.0 / 22.0

    def test_attribute_assortativity_directed(self):
        r = nx.attribute_assortativity_coefficient(self.D, "fish")
        assert r == 1.0 / 3.0

    def test_attribute_assortativity_multigraph(self):
        r = nx.attribute_assortativity_coefficient(self.M, "fish")
        assert r == 1.0

    def test_attribute_assortativity_coefficient(self):
        # from "Mixing patterns in networks"
        # fmt: off
        a = np.array([[0.258, 0.016, 0.035, 0.013],
                      [0.012, 0.157, 0.058, 0.019],
                      [0.013, 0.023, 0.306, 0.035],
                      [0.005, 0.007, 0.024, 0.016]])
        # fmt: on
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.623, decimal=3)

    def test_attribute_assortativity_coefficient2(self):
        # fmt: off
        a = np.array([[0.18, 0.02, 0.01, 0.03],
                      [0.02, 0.20, 0.03, 0.02],
                      [0.01, 0.03, 0.16, 0.01],
                      [0.03, 0.02, 0.01, 0.22]])
        # fmt: on
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.68, decimal=2)

    def test_attribute_assortativity(self):
        a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.029, decimal=3)

    def test_attribute_assortativity_negative(self):
        r = nx.numeric_assortativity_coefficient(self.N, "margin")
        np.testing.assert_almost_equal(r, -0.2903, decimal=4)

    def test_assortativity_node_kwargs(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1], size=2)
        G.add_nodes_from([2, 3], size=3)
        G.add_edges_from([(0, 1), (2, 3)])
        r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3])
        np.testing.assert_almost_equal(r, 1.0, decimal=4)

    def test_attribute_assortativity_float(self):
        r = nx.numeric_assortativity_coefficient(self.F, "margin")
        np.testing.assert_almost_equal(r, -0.1429, decimal=4)

    def test_attribute_assortativity_mixed(self):
        r = nx.numeric_assortativity_coefficient(self.K, "margin")
        np.testing.assert_almost_equal(r, 0.4340, decimal=4)
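For orientation, the first undirected assertion above corresponds to this small standalone check (illustrative only; it assumes self.P4 in the companion base_test module is nx.path_graph(4)):

import networkx as nx

# a 4-node path has degree sequence 1-2-2-1, giving degree assortativity -0.5
r = nx.degree_assortativity_coefficient(nx.path_graph(4))
assert round(r, 4) == -0.5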
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
ADDED
@@ -0,0 +1,176 @@
import pytest

np = pytest.importorskip("numpy")


import networkx as nx

from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing


class TestDegreeMixingDict(BaseTestDegreeMixing):
    def test_degree_mixing_dict_undirected(self):
        d = nx.degree_mixing_dict(self.P4)
        d_result = {1: {2: 2}, 2: {1: 2, 2: 2}}
        assert d == d_result

    def test_degree_mixing_dict_undirected_normalized(self):
        d = nx.degree_mixing_dict(self.P4, normalized=True)
        d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}}
        assert d == d_result

    def test_degree_mixing_dict_directed(self):
        d = nx.degree_mixing_dict(self.D)
        print(d)
        d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}}
        assert d == d_result

    def test_degree_mixing_dict_multigraph(self):
        d = nx.degree_mixing_dict(self.M)
        d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}}
        assert d == d_result

    def test_degree_mixing_dict_weighted(self):
        d = nx.degree_mixing_dict(self.W, weight="weight")
        d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}}
        assert d == d_result


class TestDegreeMixingMatrix(BaseTestDegreeMixing):
    def test_degree_mixing_matrix_undirected(self):
        # fmt: off
        a_result = np.array([[0, 2],
                             [2, 2]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.P4, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.P4)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_directed(self):
        # fmt: off
        a_result = np.array([[0, 0, 2],
                             [1, 0, 1],
                             [0, 0, 0]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.D, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.D)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_multigraph(self):
        # fmt: off
        a_result = np.array([[0, 1, 0],
                             [1, 0, 3],
                             [0, 3, 0]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.M, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.M)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_selfloop(self):
        # fmt: off
        a_result = np.array([[2]])
        # fmt: on
        a = nx.degree_mixing_matrix(self.S, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.S)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_weighted(self):
        a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
        a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.W, weight="weight")
        np.testing.assert_equal(a, a_result / float(a_result.sum()))

    def test_degree_mixing_matrix_mapping(self):
        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
        mapping = {0.5: 1, 1.5: 0}
        a = nx.degree_mixing_matrix(
            self.W, weight="weight", normalized=False, mapping=mapping
        )
        np.testing.assert_equal(a, a_result)


class TestAttributeMixingDict(BaseTestAttributeMixing):
    def test_attribute_mixing_dict_undirected(self):
        d = nx.attribute_mixing_dict(self.G, "fish")
        d_result = {
            "one": {"one": 2, "red": 1},
            "two": {"two": 2, "blue": 1},
            "red": {"one": 1},
            "blue": {"two": 1},
        }
        assert d == d_result

    def test_attribute_mixing_dict_directed(self):
        d = nx.attribute_mixing_dict(self.D, "fish")
        d_result = {
            "one": {"one": 1, "red": 1},
            "two": {"two": 1, "blue": 1},
            "red": {},
            "blue": {},
        }
        assert d == d_result

    def test_attribute_mixing_dict_multigraph(self):
        d = nx.attribute_mixing_dict(self.M, "fish")
        d_result = {"one": {"one": 4}, "two": {"two": 2}}
        assert d == d_result


class TestAttributeMixingMatrix(BaseTestAttributeMixing):
    def test_attribute_mixing_matrix_undirected(self):
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.G, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_directed(self):
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.D, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_multigraph(self):
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.M, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_negative(self):
        mapping = {-2: 0, -3: 1, -4: 2}
        a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
        a = nx.attribute_mixing_matrix(
            self.N, "margin", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
        np.testing.assert_equal(a, a_result / float(a_result.sum()))

    def test_attribute_mixing_matrix_float(self):
        mapping = {0.5: 1, 1.5: 0}
        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
        a = nx.attribute_mixing_matrix(
            self.F, "margin", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())
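The first dict test above reduces to this standalone snippet (illustrative only; it assumes self.P4 is nx.path_graph(4) as defined in the companion base_test module):

import networkx as nx

d = nx.degree_mixing_dict(nx.path_graph(4))
# each edge contributes a (degree, degree) pair in both directions
assert d == {1: {2: 2}, 2: {1: 2, 2: 2}}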
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
ADDED
@@ -0,0 +1,108 @@
import pytest

import networkx as nx


class TestAverageNeighbor:
    def test_degree_p4(self):
        G = nx.path_graph(4)
        answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
        nd = nx.average_neighbor_degree(G)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        D = nx.DiGraph(G.edges(data=True))
        nd = nx.average_neighbor_degree(D)
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "out")
        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "out", "in")
        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "in")
        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}

    def test_degree_p4_weighted(self):
        G = nx.path_graph(4)
        G[1][2]["weight"] = 4
        answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
        nd = nx.average_neighbor_degree(G, weight="weight")
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == answer

        D = nx.DiGraph(G.edges(data=True))
        print(D.edges(data=True))
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "out", "out", weight="weight")
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "in", weight="weight")
        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "in", "out", weight="weight")
        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "out", "in", weight="weight")
        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
        nd = nx.average_neighbor_degree(D, source="in+out", weight="weight")
        assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0}
        nd = nx.average_neighbor_degree(D, target="in+out", weight="weight")
        assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0}

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == answer
        nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight")
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight")
        assert nd == answer

    def test_degree_k4(self):
        G = nx.complete_graph(4)
        answer = {0: 3, 1: 3, 2: 3, 3: 3}
        nd = nx.average_neighbor_degree(G)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, source="in", target="in")
        assert nd == answer

    def test_degree_k4_nodes(self):
        G = nx.complete_graph(4)
        answer = {1: 3.0, 2: 3.0}
        nd = nx.average_neighbor_degree(G, nodes=[1, 2])
        assert nd == answer

    def test_degree_barrat(self):
        G = nx.star_graph(5)
        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
        G[0][5]["weight"] = 5
        nd = nx.average_neighbor_degree(G)[5]
        assert nd == 1.8
        nd = nx.average_neighbor_degree(G, weight="weight")[5]
        assert nd == pytest.approx(3.222222, abs=1e-5)

    def test_error_invalid_source_target(self):
        G = nx.path_graph(4)
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "error")
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "in", "error")
        G = G.to_directed()
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "error")
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "in", "error")
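test_degree_p4 above boils down to this standalone check (illustrative only, not part of the vendored file):

import networkx as nx

G = nx.path_graph(4)
# end nodes see one neighbor of degree 2; inner nodes average degrees 1 and 2
assert nx.average_neighbor_degree(G) == {0: 2, 1: 1.5, 2: 1.5, 3: 2}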
venv/lib/python3.10/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py
ADDED
@@ -0,0 +1,87 @@
import networkx as nx

from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing


class TestAttributeMixingXY(BaseTestAttributeMixing):
    def test_node_attribute_xy_undirected(self):
        attrxy = sorted(nx.node_attribute_xy(self.G, "fish"))
        attrxy_result = sorted(
            [
                ("one", "one"),
                ("one", "one"),
                ("two", "two"),
                ("two", "two"),
                ("one", "red"),
                ("red", "one"),
                ("blue", "two"),
                ("two", "blue"),
            ]
        )
        assert attrxy == attrxy_result

    def test_node_attribute_xy_undirected_nodes(self):
        attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"]))
        attrxy_result = sorted([])
        assert attrxy == attrxy_result

    def test_node_attribute_xy_directed(self):
        attrxy = sorted(nx.node_attribute_xy(self.D, "fish"))
        attrxy_result = sorted(
            [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")]
        )
        assert attrxy == attrxy_result

    def test_node_attribute_xy_multigraph(self):
        attrxy = sorted(nx.node_attribute_xy(self.M, "fish"))
        attrxy_result = [
            ("one", "one"),
            ("one", "one"),
            ("one", "one"),
            ("one", "one"),
            ("two", "two"),
            ("two", "two"),
        ]
        assert attrxy == attrxy_result

    def test_node_attribute_xy_selfloop(self):
        attrxy = sorted(nx.node_attribute_xy(self.S, "fish"))
        attrxy_result = [("one", "one"), ("two", "two")]
        assert attrxy == attrxy_result


class TestDegreeMixingXY(BaseTestDegreeMixing):
    def test_node_degree_xy_undirected(self):
        xy = sorted(nx.node_degree_xy(self.P4))
        xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)])
        assert xy == xy_result

    def test_node_degree_xy_undirected_nodes(self):
        xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1]))
        xy_result = sorted([(1, 2), (2, 1)])
        assert xy == xy_result

    def test_node_degree_xy_directed(self):
        xy = sorted(nx.node_degree_xy(self.D))
        xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)])
        assert xy == xy_result

    def test_node_degree_xy_multigraph(self):
        xy = sorted(nx.node_degree_xy(self.M))
        xy_result = sorted(
            [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)]
        )
        assert xy == xy_result

    def test_node_degree_xy_selfloop(self):
        xy = sorted(nx.node_degree_xy(self.S))
        xy_result = sorted([(2, 2), (2, 2)])
        assert xy == xy_result

    def test_node_degree_xy_weighted(self):
        G = nx.Graph()
        G.add_edge(1, 2, weight=7)
        G.add_edge(2, 3, weight=10)
        xy = sorted(nx.node_degree_xy(G, weight="weight"))
        xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
        assert xy == xy_result
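test_node_degree_xy_undirected above corresponds to this snippet (illustrative; it assumes self.P4 is nx.path_graph(4) from the companion base_test module):

import networkx as nx

xy = sorted(nx.node_degree_xy(nx.path_graph(4)))
# each undirected edge yields a degree pair in both orientations
assert xy == [(1, 2), (1, 2), (2, 1), (2, 1), (2, 2), (2, 2)]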
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/__init__.py
ADDED
@@ -0,0 +1,20 @@
from .betweenness import *
from .betweenness_subset import *
from .closeness import *
from .current_flow_betweenness import *
from .current_flow_betweenness_subset import *
from .current_flow_closeness import *
from .degree_alg import *
from .dispersion import *
from .eigenvector import *
from .group import *
from .harmonic import *
from .katz import *
from .load import *
from .percolation import *
from .reaching import *
from .second_order import *
from .subgraph_alg import *
from .trophic import *
from .voterank_alg import *
from .laplacian import *
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness_subset.py
ADDED
@@ -0,0 +1,274 @@
"""Betweenness centrality measures for subsets of nodes."""
import networkx as nx
from networkx.algorithms.centrality.betweenness import (
    _add_edge_keys,
)
from networkx.algorithms.centrality.betweenness import (
    _single_source_dijkstra_path_basic as dijkstra,
)
from networkx.algorithms.centrality.betweenness import (
    _single_source_shortest_path_basic as shortest_path,
)

__all__ = [
    "betweenness_centrality_subset",
    "edge_betweenness_centrality_subset",
]


@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
    and $\sigma(s, t|v)$ is the number of those paths
    passing through some node $v$ other than $s, t$.
    If $s = t$, $\sigma(s, t) = 1$,
    and if $v \in {s, t}$, $\sigma(s, t|v) = 0$ [2]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by $2/((n-1)(n-2))$
      for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is
    designed to make betweenness_centrality(G) be the same as
    betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    are easy to count. Undirected paths are tricky: should a path
    from "u" to "v" count as 1 undirected path or as 2 directed paths?

    For betweenness_centrality we report the number of undirected
    paths when G is undirected.

    For betweenness_centrality_subset the reporting is different.
    If the source and target subsets are the same, then we want
    to count undirected paths. But if the source and target subsets
    differ -- for example, if sources is {0} and targets is {1},
    then we are only counting the paths in one direction. They are
    undirected paths but we are counting them in a directed way.
    To count them as undirected paths, each should count as half a path.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = dijkstra(G, s, weight)
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
    return b


@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality_subset(
    G, sources, targets, normalized=False, weight=None
):
    r"""Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
    and $\sigma(s, t|e)$ is the number of those paths
    passing through edge $e$ [2]_.

    Parameters
    ----------
    G : graph
      A networkx graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by `2/(n(n-1))`
      for graphs, and `1/(n(n-1))` for directed graphs where `n`
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in edge_betweenness_centrality() and is designed to make
    edge_betweenness_centrality(G) be the same as
    edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    b.update(dict.fromkeys(G.edges(), 0.0))  # b[e] for e in G.edges()
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = dijkstra(G, s, weight)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for n in G:  # remove nodes to only return edges
        del b[n]
    b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
    if G.is_multigraph():
        b = _add_edge_keys(G, b, weight=weight)
    return b


def _accumulate_subset(betweenness, S, P, sigma, s, targets):
    delta = dict.fromkeys(S, 0.0)
    target_set = set(targets) - {s}
    while S:
        w = S.pop()
        if w in target_set:
            coeff = (delta[w] + 1.0) / sigma[w]
        else:
            coeff = delta[w] / sigma[w]
        for v in P[w]:
            delta[v] += sigma[v] * coeff
        if w != s:
            betweenness[w] += delta[w]
    return betweenness


def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets):
    """edge_betweenness_centrality_subset helper."""
    delta = dict.fromkeys(S, 0)
    target_set = set(targets)
    while S:
        w = S.pop()
        for v in P[w]:
            if w in target_set:
                c = (sigma[v] / sigma[w]) * (1.0 + delta[w])
            else:
                c = delta[w] / len(P[w])
            if (v, w) not in betweenness:
                betweenness[(w, v)] += c
            else:
                betweenness[(v, w)] += c
            delta[v] += c
        if w != s:
            betweenness[w] += delta[w]
    return betweenness


def _rescale(betweenness, n, normalized, directed=False):
    """betweenness_centrality_subset helper."""
    if normalized:
        if n <= 2:
            scale = None  # no normalization b=0 for all nodes
        else:
            scale = 1.0 / ((n - 1) * (n - 2))
    else:  # rescale by 2 for undirected graphs
        if not directed:
            scale = 0.5
        else:
            scale = None
    if scale is not None:
        for v in betweenness:
            betweenness[v] *= scale
    return betweenness


def _rescale_e(betweenness, n, normalized, directed=False):
    """edge_betweenness_centrality_subset helper."""
    if normalized:
        if n <= 1:
            scale = None  # no normalization b=0 for all nodes
        else:
            scale = 1.0 / (n * (n - 1))
    else:  # rescale by 2 for undirected graphs
        if not directed:
            scale = 0.5
        else:
            scale = None
    if scale is not None:
        for v in betweenness:
            betweenness[v] *= scale
    return betweenness
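A minimal usage sketch for the subset variant defined above (illustrative only; the graph and the source/target subsets are arbitrary). Per the half-path note in the docstring, with the default normalized=False each interior node of the single 0-4 shortest path should come out as 0.5:

import networkx as nx

G = nx.path_graph(5)
b = nx.betweenness_centrality_subset(G, sources=[0], targets=[4])
# expected, per the docstring's undirected half-path convention:
# endpoints 0.0, interior nodes 0.5
print(b)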
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/closeness.py
ADDED
@@ -0,0 +1,281 @@
"""
Closeness centrality measures.
"""
import functools

import networkx as nx
from networkx.exception import NetworkXError
from networkx.utils.decorators import not_implemented_for

__all__ = ["closeness_centrality", "incremental_closeness_centrality"]


@nx._dispatchable(edge_attrs="distance")
def closeness_centrality(G, u=None, distance=None, wf_improved=True):
    r"""Compute closeness centrality for nodes.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    average shortest path distance to `u` over all `n-1` reachable nodes.

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    where `d(v, u)` is the shortest-path distance between `v` and `u`,
    and `n-1` is the number of nodes reachable from `u`. Notice that the
    closeness distance function computes the incoming distance to `u`
    for directed graphs. To use outward distance, act on `G.reverse()`.

    Notice that higher values of closeness indicate higher centrality.

    Wasserman and Faust propose an improved formula for graphs with
    more than one connected component. The result is "a ratio of the
    fraction of actors in the group who are reachable, to the average
    distance" from the reachable actors [2]_. You might think this
    scale factor is inverted but it is not. As is, nodes from small
    components receive a smaller closeness value. Letting `N` denote
    the number of nodes in the graph,

    .. math::

        C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    Parameters
    ----------
    G : graph
      A NetworkX graph

    u : node, optional
      Return only the value for node u

    distance : edge attribute key, optional (default=None)
      Use the specified edge attribute as the edge distance in shortest
      path calculations. If `None` (the default) all edges have a distance of 1.
      Absent edge attributes are assigned a distance of 1. Note that no check
      is performed to ensure that edges have the provided attribute.

    wf_improved : bool, optional (default=True)
      If True, scale by the fraction of nodes reachable. This gives the
      Wasserman and Faust improved formula. For single component graphs
      it is the same as the original formula.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with closeness centrality as the value.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.closeness_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, incremental_closeness_centrality

    Notes
    -----
    The closeness centrality is normalized to `(n-1)/(|G|-1)` where
    `n` is the number of nodes in the connected part of graph
    containing the node. If the graph is not completely connected,
    this algorithm computes the closeness centrality for each
    connected part separately scaled by that parts size.

    If the 'distance' keyword is set to an edge attribute key then the
    shortest-path length will be computed using Dijkstra's algorithm with
    that edge attribute as the edge weight.

    The closeness centrality uses *inward* distance to a node, not outward.
    If you want to use outword distances apply the function to `G.reverse()`

    In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the
    outward distance rather than the inward distance. If you use a 'distance'
    keyword and a DiGraph, your results will change between v2.2 and v2.3.

    References
    ----------
    .. [1] Linton C. Freeman: Centrality in networks: I.
       Conceptual clarification. Social Networks 1:215-239, 1979.
       https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] pg. 201 of Wasserman, S. and Faust, K.,
       Social Network Analysis: Methods and Applications, 1994,
       Cambridge University Press.
    """
    if G.is_directed():
        G = G.reverse()  # create a reversed graph view

    if distance is not None:
        # use Dijkstra's algorithm with specified attribute as edge weight
        path_length = functools.partial(
            nx.single_source_dijkstra_path_length, weight=distance
        )
    else:
        path_length = nx.single_source_shortest_path_length

    if u is None:
        nodes = G.nodes
    else:
        nodes = [u]
    closeness_dict = {}
    for n in nodes:
        sp = path_length(G, n)
        totsp = sum(sp.values())
        len_G = len(G)
        _closeness_centrality = 0.0
        if totsp > 0.0 and len_G > 1:
            _closeness_centrality = (len(sp) - 1.0) / totsp
            # normalize to number of nodes-1 in connected part
            if wf_improved:
                s = (len(sp) - 1.0) / (len_G - 1)
                _closeness_centrality *= s
        closeness_dict[n] = _closeness_centrality
    if u is not None:
        return closeness_dict[u]
    return closeness_dict


@not_implemented_for("directed")
@nx._dispatchable(mutates_input=True)
def incremental_closeness_centrality(
    G, edge, prev_cc=None, insertion=True, wf_improved=True
):
    r"""Incremental closeness centrality for nodes.

    Compute closeness centrality for nodes using level-based work filtering
    as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al.

    Level-based work filtering detects unnecessary updates to the closeness
    centrality and filters them out.

    ---
    From "Incremental Algorithms for Closeness Centrality":

    Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V
    such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`
    Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.

    Where :math:`dG(u, v)` denotes the length of the shortest path between
    two vertices u, v in a graph G, cc[s] is the closeness centrality for a
    vertex s in V, and cc'[s] is the closeness centrality for a
    vertex s in V, with the (u, v) edge added.
    ---

    We use Theorem 1 to filter out updates when adding or removing an edge.
    When adding an edge (u, v), we compute the shortest path lengths from all
    other nodes to u and to v before the node is added. When removing an edge,
    we compute the shortest path lengths after the edge is removed. Then we
    apply Theorem 1 to use previously computed closeness centrality for nodes
    where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
    undirected, unweighted graphs; the distance argument is not supported.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    sum of the shortest path distances from `u` to all `n-1` other nodes.
    Since the sum of distances depends on the number of nodes in the
    graph, closeness is normalized by the sum of minimum possible
    distances `n-1`.

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    where `d(v, u)` is the shortest-path distance between `v` and `u`,
    and `n` is the number of nodes in the graph.

    Notice that higher values of closeness indicate higher centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    edge : tuple
      The modified edge (u, v) in the graph.

    prev_cc : dictionary
      The previous closeness centrality for all nodes in the graph.

    insertion : bool, optional
      If True (default) the edge was inserted, otherwise it was deleted from the graph.

    wf_improved : bool, optional (default=True)
      If True, scale by the fraction of nodes reachable. This gives the
      Wasserman and Faust improved formula. For single component graphs
      it is the same as the original formula.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with closeness centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    Notes
    -----
    The closeness centrality is normalized to `(n-1)/(|G|-1)` where
    `n` is the number of nodes in the connected part of graph
    containing the node. If the graph is not completely connected,
    this algorithm computes the closeness centrality for each
    connected part separately.

    References
    ----------
    .. [1] Freeman, L.C., 1979. Centrality in networks: I.
       Conceptual clarification. Social Networks 1, 215--239.
       https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental
       Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data
       http://sariyuce.com/papers/bigdata13.pdf
    """
    if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()):
        raise NetworkXError("prev_cc and G do not have the same nodes")

    # Unpack edge
    (u, v) = edge
    path_length = nx.single_source_shortest_path_length

    if insertion:
        # For edge insertion, we want shortest paths before the edge is inserted
        du = path_length(G, u)
        dv = path_length(G, v)

        G.add_edge(u, v)
    else:
        G.remove_edge(u, v)

        # For edge removal, we want shortest paths after the edge is removed
        du = path_length(G, u)
        dv = path_length(G, v)

    if prev_cc is None:
        return nx.closeness_centrality(G)

    nodes = G.nodes()
    closeness_dict = {}
    for n in nodes:
        if n in du and n in dv and abs(du[n] - dv[n]) <= 1:
            closeness_dict[n] = prev_cc[n]
        else:
            sp = path_length(G, n)
            totsp = sum(sp.values())
            len_G = len(G)
            _closeness_centrality = 0.0
            if totsp > 0.0 and len_G > 1:
                _closeness_centrality = (len(sp) - 1.0) / totsp
                # normalize to number of nodes-1 in connected part
                if wf_improved:
                    s = (len(sp) - 1.0) / (len_G - 1)
                    _closeness_centrality *= s
            closeness_dict[n] = _closeness_centrality

    # Leave the graph as we found it
    if insertion:
        G.remove_edge(u, v)
    else:
        G.add_edge(u, v)

    return closeness_dict
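A short usage sketch tying the two functions above together (illustrative only; the graph is taken from the closeness_centrality docstring example):

import networkx as nx

G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
prev = nx.closeness_centrality(G)  # {0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75} per the docstring
# re-use prev wherever Theorem 1 says the value cannot change after adding edge (2, 3)
new = nx.incremental_closeness_centrality(G, edge=(2, 3), prev_cc=prev, insertion=True)
print(new)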
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py
ADDED
@@ -0,0 +1,341 @@
1 |
+
"""Current-flow betweenness centrality measures."""
|
2 |
+
import networkx as nx
|
3 |
+
from networkx.algorithms.centrality.flow_matrix import (
|
4 |
+
CGInverseLaplacian,
|
5 |
+
FullInverseLaplacian,
|
6 |
+
SuperLUInverseLaplacian,
|
7 |
+
flow_matrix_row,
|
8 |
+
)
|
9 |
+
from networkx.utils import (
|
10 |
+
not_implemented_for,
|
11 |
+
py_random_state,
|
12 |
+
reverse_cuthill_mckee_ordering,
|
13 |
+
)
|
14 |
+
|
15 |
+
__all__ = [
|
16 |
+
"current_flow_betweenness_centrality",
|
17 |
+
"approximate_current_flow_betweenness_centrality",
|
18 |
+
"edge_current_flow_betweenness_centrality",
|
19 |
+
]
|
20 |
+
|
21 |
+
|
22 |
+
@not_implemented_for("directed")
|
23 |
+
@py_random_state(7)
|
24 |
+
@nx._dispatchable(edge_attrs="weight")
|
25 |
+
def approximate_current_flow_betweenness_centrality(
|
26 |
+
G,
|
27 |
+
normalized=True,
|
28 |
+
weight=None,
|
29 |
+
dtype=float,
|
30 |
+
solver="full",
|
31 |
+
epsilon=0.5,
|
32 |
+
kmax=10000,
|
33 |
+
seed=None,
|
34 |
+
):
|
35 |
+
r"""Compute the approximate current-flow betweenness centrality for nodes.
|
36 |
+
|
37 |
+
Approximates the current-flow betweenness centrality within absolute
|
38 |
+
error of epsilon with high probability [1]_.
|
39 |
+
|
40 |
+
|
41 |
+
Parameters
|
42 |
+
----------
|
43 |
+
G : graph
|
44 |
+
A NetworkX graph
|
45 |
+
|
46 |
+
normalized : bool, optional (default=True)
|
47 |
+
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
48 |
+
n is the number of nodes in G.
|
49 |
+
|
50 |
+
weight : string or None, optional (default=None)
|
51 |
+
Key for edge data used as the edge weight.
|
52 |
+
If None, then use 1 as each edge weight.
|
53 |
+
The weight reflects the capacity or the strength of the
|
54 |
+
edge.
|
55 |
+
|
56 |
+
dtype : data type (float)
|
57 |
+
Default data type for internal matrices.
|
58 |
+
Set to np.float32 for lower memory consumption.
|
59 |
+
|
60 |
+
solver : string (default='full')
|
61 |
+
Type of linear solver to use for computing the flow matrix.
|
62 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
63 |
+
"cg" (uses least memory).
|
64 |
+
|
65 |
+
epsilon: float
|
66 |
+
Absolute error tolerance.
|
67 |
+
|
68 |
+
kmax: int
|
69 |
+
Maximum number of sample node pairs to use for approximation.
|
70 |
+
|
71 |
+
seed : integer, random_state, or None (default)
|
72 |
+
Indicator of random number generation state.
|
73 |
+
See :ref:`Randomness<randomness>`.
|
74 |
+
|
75 |
+
Returns
|
76 |
+
-------
|
77 |
+
nodes : dictionary
|
78 |
+
Dictionary of nodes with betweenness centrality as the value.
|
79 |
+
|
80 |
+
See Also
|
81 |
+
--------
|
82 |
+
current_flow_betweenness_centrality
|
83 |
+
|
84 |
+
Notes
|
85 |
+
-----
|
86 |
+
The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
|
87 |
+
and the space required is $O(m)$ for $n$ nodes and $m$ edges.
|
88 |
+
|
89 |
+
If the edges have a 'weight' attribute they will be used as
|
90 |
+
weights in this algorithm. Unspecified weights are set to 1.
|
91 |
+
|
92 |
+
References
|
93 |
+
----------
|
94 |
+
.. [1] Ulrik Brandes and Daniel Fleischer:
|
95 |
+
Centrality Measures Based on Current Flow.
|
96 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
97 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
98 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
99 |
+
"""
|
100 |
+
import numpy as np
|
101 |
+
|
102 |
+
if not nx.is_connected(G):
|
103 |
+
raise nx.NetworkXError("Graph not connected.")
|
104 |
+
solvername = {
|
105 |
+
"full": FullInverseLaplacian,
|
106 |
+
"lu": SuperLUInverseLaplacian,
|
107 |
+
"cg": CGInverseLaplacian,
|
108 |
+
}
|
109 |
+
n = G.number_of_nodes()
|
110 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
111 |
+
# make a copy with integer labels according to rcm ordering
|
112 |
+
# this could be done without a copy if we really wanted to
|
113 |
+
H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
|
114 |
+
L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc")
|
115 |
+
L = L.astype(dtype)
|
116 |
+
C = solvername[solver](L, dtype=dtype) # initialize solver
|
117 |
+
betweenness = dict.fromkeys(H, 0.0)
|
118 |
+
nb = (n - 1.0) * (n - 2.0) # normalization factor
|
119 |
+
cstar = n * (n - 1) / nb
|
120 |
+
l = 1 # parameter in approximation, adjustable
|
121 |
+
k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
|
122 |
+
if k > kmax:
|
123 |
+
msg = f"Number random pairs k>kmax ({k}>{kmax}) "
|
124 |
+
raise nx.NetworkXError(msg, "Increase kmax or epsilon")
|
125 |
+
cstar2k = cstar / (2 * k)
|
126 |
+
for _ in range(k):
|
127 |
+
s, t = pair = seed.sample(range(n), 2)
|
128 |
+
b = np.zeros(n, dtype=dtype)
|
129 |
+
b[s] = 1
|
130 |
+
b[t] = -1
|
131 |
+
p = C.solve(b)
|
132 |
+
for v in H:
|
133 |
+
if v in pair:
|
134 |
+
continue
|
135 |
+
for nbr in H[v]:
|
136 |
+
w = H[v][nbr].get(weight, 1.0)
|
137 |
+
betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k)
|
138 |
+
if normalized:
|
139 |
+
factor = 1.0
|
140 |
+
else:
|
141 |
+
factor = nb / 2.0
|
142 |
+
# remap to original node names and "unnormalize" if required
|
143 |
+
return {ordering[k]: v * factor for k, v in betweenness.items()}
|
144 |
+
|
145 |
+
|
146 |
+
@not_implemented_for("directed")
|
147 |
+
@nx._dispatchable(edge_attrs="weight")
|
148 |
+
def current_flow_betweenness_centrality(
|
149 |
+
G, normalized=True, weight=None, dtype=float, solver="full"
|
150 |
+
):
|
151 |
+
r"""Compute current-flow betweenness centrality for nodes.
|
152 |
+
|
153 |
+
Current-flow betweenness centrality uses an electrical current
|
154 |
+
model for information spreading in contrast to betweenness
|
155 |
+
centrality which uses shortest paths.
|
156 |
+
|
157 |
+
Current-flow betweenness centrality is also known as
|
158 |
+
random-walk betweenness centrality [2]_.
|
159 |
+
|
160 |
+
Parameters
|
161 |
+
----------
|
162 |
+
G : graph
|
163 |
+
A NetworkX graph
|
164 |
+
|
165 |
+
normalized : bool, optional (default=True)
|
166 |
+
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
167 |
+
n is the number of nodes in G.
|
168 |
+
|
169 |
+
weight : string or None, optional (default=None)
|
170 |
+
Key for edge data used as the edge weight.
|
171 |
+
If None, then use 1 as each edge weight.
|
172 |
+
The weight reflects the capacity or the strength of the
|
173 |
+
edge.
|
174 |
+
|
175 |
+
dtype : data type (float)
|
176 |
+
Default data type for internal matrices.
|
177 |
+
Set to np.float32 for lower memory consumption.
|
178 |
+
|
179 |
+
solver : string (default='full')
|
180 |
+
Type of linear solver to use for computing the flow matrix.
|
181 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
182 |
+
"cg" (uses least memory).
|
183 |
+
|
184 |
+
Returns
|
185 |
+
-------
|
186 |
+
nodes : dictionary
|
187 |
+
Dictionary of nodes with betweenness centrality as the value.
|
188 |
+
|
189 |
+
See Also
|
190 |
+
--------
|
191 |
+
approximate_current_flow_betweenness_centrality
|
192 |
+
betweenness_centrality
|
193 |
+
edge_betweenness_centrality
|
194 |
+
edge_current_flow_betweenness_centrality
|
195 |
+
|
196 |
+
Notes
|
197 |
+
-----
|
198 |
+
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
199 |
+
time [1]_, where $I(n-1)$ is the time needed to compute the
|
200 |
+
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
201 |
+
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
202 |
+
Laplacian matrix condition number.
|
203 |
+
|
204 |
+
The space required is $O(nw)$ where $w$ is the width of the sparse
|
205 |
+
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
206 |
+
|
207 |
+
If the edges have a 'weight' attribute they will be used as
|
208 |
+
weights in this algorithm. Unspecified weights are set to 1.
|
209 |
+
|
210 |
+
References
|
211 |
+
----------
|
212 |
+
.. [1] Centrality Measures Based on Current Flow.
|
213 |
+
Ulrik Brandes and Daniel Fleischer,
|
214 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
215 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
216 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
217 |
+
|
218 |
+
.. [2] A measure of betweenness centrality based on random walks,
|
219 |
+
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
220 |
+
"""
|
221 |
+
if not nx.is_connected(G):
|
222 |
+
raise nx.NetworkXError("Graph not connected.")
|
223 |
+
N = G.number_of_nodes()
|
224 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
225 |
+
# make a copy with integer labels according to rcm ordering
|
226 |
+
# this could be done without a copy if we really wanted to
|
227 |
+
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
228 |
+
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
229 |
+
for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
230 |
+
pos = dict(zip(row.argsort()[::-1], range(N)))
|
231 |
+
for i in range(N):
|
232 |
+
betweenness[s] += (i - pos[i]) * row.item(i)
|
233 |
+
betweenness[t] += (N - i - 1 - pos[i]) * row.item(i)
|
234 |
+
if normalized:
|
235 |
+
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
236 |
+
else:
|
237 |
+
nb = 2.0
|
238 |
+
return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()}
|
239 |
+
|
240 |
+
|
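A short sketch (not part of the diff) contrasting current-flow (random-walk) betweenness with shortest-path betweenness, assuming both are reachable as top-level NetworkX functions:

import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
cfb = nx.current_flow_betweenness_centrality(G)
spb = nx.betweenness_centrality(G)
# On a path there is a single route between any pair, so both measures
# agree and rank the middle node highest.
print(max(cfb, key=cfb.get), max(spb, key=spb.get))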
241 |
+
@not_implemented_for("directed")
|
242 |
+
@nx._dispatchable(edge_attrs="weight")
|
243 |
+
def edge_current_flow_betweenness_centrality(
|
244 |
+
G, normalized=True, weight=None, dtype=float, solver="full"
|
245 |
+
):
|
246 |
+
r"""Compute current-flow betweenness centrality for edges.
|
247 |
+
|
248 |
+
Current-flow betweenness centrality uses an electrical current
|
249 |
+
model for information spreading in contrast to betweenness
|
250 |
+
centrality which uses shortest paths.
|
251 |
+
|
252 |
+
Current-flow betweenness centrality is also known as
|
253 |
+
random-walk betweenness centrality [2]_.
|
254 |
+
|
255 |
+
Parameters
|
256 |
+
----------
|
257 |
+
G : graph
|
258 |
+
A NetworkX graph
|
259 |
+
|
260 |
+
normalized : bool, optional (default=True)
|
261 |
+
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
262 |
+
n is the number of nodes in G.
|
263 |
+
|
264 |
+
weight : string or None, optional (default=None)
|
265 |
+
Key for edge data used as the edge weight.
|
266 |
+
If None, then use 1 as each edge weight.
|
267 |
+
The weight reflects the capacity or the strength of the
|
268 |
+
edge.
|
269 |
+
|
270 |
+
dtype : data type (default=float)
|
271 |
+
Default data type for internal matrices.
|
272 |
+
Set to np.float32 for lower memory consumption.
|
273 |
+
|
274 |
+
solver : string (default='full')
|
275 |
+
Type of linear solver to use for computing the flow matrix.
|
276 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
277 |
+
"cg" (uses least memory).
|
278 |
+
|
279 |
+
Returns
|
280 |
+
-------
|
281 |
+
nodes : dictionary
|
282 |
+
Dictionary of edge tuples with betweenness centrality as the value.
|
283 |
+
|
284 |
+
Raises
|
285 |
+
------
|
286 |
+
NetworkXError
|
287 |
+
The algorithm does not support DiGraphs.
|
288 |
+
If the input graph is an instance of DiGraph class, NetworkXError
|
289 |
+
is raised.
|
290 |
+
|
291 |
+
See Also
|
292 |
+
--------
|
293 |
+
betweenness_centrality
|
294 |
+
edge_betweenness_centrality
|
295 |
+
current_flow_betweenness_centrality
|
296 |
+
|
297 |
+
Notes
|
298 |
+
-----
|
299 |
+
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
300 |
+
time [1]_, where $I(n-1)$ is the time needed to compute the
|
301 |
+
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
302 |
+
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
303 |
+
Laplacian matrix condition number.
|
304 |
+
|
305 |
+
The space required is $O(nw)$ where $w$ is the width of the sparse
|
306 |
+
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
307 |
+
|
308 |
+
If the edges have a 'weight' attribute they will be used as
|
309 |
+
weights in this algorithm. Unspecified weights are set to 1.
|
310 |
+
|
311 |
+
References
|
312 |
+
----------
|
313 |
+
.. [1] Centrality Measures Based on Current Flow.
|
314 |
+
Ulrik Brandes and Daniel Fleischer,
|
315 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
316 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
317 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
318 |
+
|
319 |
+
.. [2] A measure of betweenness centrality based on random walks,
|
320 |
+
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
321 |
+
"""
|
322 |
+
if not nx.is_connected(G):
|
323 |
+
raise nx.NetworkXError("Graph not connected.")
|
324 |
+
N = G.number_of_nodes()
|
325 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
326 |
+
# make a copy with integer labels according to rcm ordering
|
327 |
+
# this could be done without a copy if we really wanted to
|
328 |
+
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
329 |
+
edges = (tuple(sorted((u, v))) for u, v in H.edges())
|
330 |
+
betweenness = dict.fromkeys(edges, 0.0)
|
331 |
+
if normalized:
|
332 |
+
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
333 |
+
else:
|
334 |
+
nb = 2.0
|
335 |
+
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
336 |
+
pos = dict(zip(row.argsort()[::-1], range(1, N + 1)))
|
337 |
+
for i in range(N):
|
338 |
+
betweenness[e] += (i + 1 - pos[i]) * row.item(i)
|
339 |
+
betweenness[e] += (N - i - pos[i]) * row.item(i)
|
340 |
+
betweenness[e] /= nb
|
341 |
+
return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()}
|
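A usage sketch for the edge variant defined above (not part of the diff); the returned dictionary is keyed by edge tuples, as the docstring states:

import networkx as nx

G = nx.cycle_graph(4)
edge_cfb = nx.edge_current_flow_betweenness_centrality(G, normalized=True)
# By symmetry every edge of the 4-cycle carries the same current flow.
for (u, v), value in sorted(edge_cfb.items()):
    print(u, v, round(value, 3))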
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
ADDED
@@ -0,0 +1,95 @@
1 |
+
"""Current-flow closeness centrality measures."""
|
2 |
+
import networkx as nx
|
3 |
+
from networkx.algorithms.centrality.flow_matrix import (
|
4 |
+
CGInverseLaplacian,
|
5 |
+
FullInverseLaplacian,
|
6 |
+
SuperLUInverseLaplacian,
|
7 |
+
)
|
8 |
+
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
|
9 |
+
|
10 |
+
__all__ = ["current_flow_closeness_centrality", "information_centrality"]
|
11 |
+
|
12 |
+
|
13 |
+
@not_implemented_for("directed")
|
14 |
+
@nx._dispatchable(edge_attrs="weight")
|
15 |
+
def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
|
16 |
+
"""Compute current-flow closeness centrality for nodes.
|
17 |
+
|
18 |
+
Current-flow closeness centrality is a variant of closeness
|
19 |
+
centrality based on effective resistance between nodes in
|
20 |
+
a network. This metric is also known as information centrality.
|
21 |
+
|
22 |
+
Parameters
|
23 |
+
----------
|
24 |
+
G : graph
|
25 |
+
A NetworkX graph.
|
26 |
+
|
27 |
+
weight : None or string, optional (default=None)
|
28 |
+
If None, all edge weights are considered equal.
|
29 |
+
Otherwise holds the name of the edge attribute used as weight.
|
30 |
+
The weight reflects the capacity or the strength of the
|
31 |
+
edge.
|
32 |
+
|
33 |
+
dtype: data type (default=float)
|
34 |
+
Default data type for internal matrices.
|
35 |
+
Set to np.float32 for lower memory consumption.
|
36 |
+
|
37 |
+
solver: string (default='lu')
|
38 |
+
Type of linear solver to use for computing the flow matrix.
|
39 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
40 |
+
"cg" (uses least memory).
|
41 |
+
|
42 |
+
Returns
|
43 |
+
-------
|
44 |
+
nodes : dictionary
|
45 |
+
Dictionary of nodes with current flow closeness centrality as the value.
|
46 |
+
|
47 |
+
See Also
|
48 |
+
--------
|
49 |
+
closeness_centrality
|
50 |
+
|
51 |
+
Notes
|
52 |
+
-----
|
53 |
+
The algorithm is from Brandes [1]_.
|
54 |
+
|
55 |
+
See also [2]_ for the original definition of information centrality.
|
56 |
+
|
57 |
+
References
|
58 |
+
----------
|
59 |
+
.. [1] Ulrik Brandes and Daniel Fleischer,
|
60 |
+
Centrality Measures Based on Current Flow.
|
61 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
62 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
63 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
64 |
+
|
65 |
+
.. [2] Karen Stephenson and Marvin Zelen:
|
66 |
+
Rethinking centrality: Methods and examples.
|
67 |
+
Social Networks 11(1):1-37, 1989.
|
68 |
+
https://doi.org/10.1016/0378-8733(89)90016-6
|
69 |
+
"""
|
70 |
+
if not nx.is_connected(G):
|
71 |
+
raise nx.NetworkXError("Graph not connected.")
|
72 |
+
solvername = {
|
73 |
+
"full": FullInverseLaplacian,
|
74 |
+
"lu": SuperLUInverseLaplacian,
|
75 |
+
"cg": CGInverseLaplacian,
|
76 |
+
}
|
77 |
+
N = G.number_of_nodes()
|
78 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
79 |
+
# make a copy with integer labels according to rcm ordering
|
80 |
+
# this could be done without a copy if we really wanted to
|
81 |
+
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
82 |
+
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
83 |
+
N = H.number_of_nodes()
|
84 |
+
L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc")
|
85 |
+
L = L.astype(dtype)
|
86 |
+
C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver
|
87 |
+
for v in H:
|
88 |
+
col = C2.get_row(v)
|
89 |
+
for w in H:
|
90 |
+
betweenness[v] += col.item(v) - 2 * col.item(w)
|
91 |
+
betweenness[w] += col.item(v)
|
92 |
+
return {ordering[node]: 1 / value for node, value in betweenness.items()}
|
93 |
+
|
94 |
+
|
95 |
+
information_centrality = current_flow_closeness_centrality
|
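A small sketch (not part of the diff) showing that information_centrality is just another name bound to current_flow_closeness_centrality, as the final line of the module indicates:

import networkx as nx

G = nx.path_graph(4)
cfc = nx.current_flow_closeness_centrality(G)
info = nx.information_centrality(G)
assert cfc == info  # same function object, two public names
print({n: round(v, 3) for n, v in cfc.items()})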
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/degree_alg.py
ADDED
@@ -0,0 +1,149 @@
1 |
+
"""Degree centrality measures."""
|
2 |
+
import networkx as nx
|
3 |
+
from networkx.utils.decorators import not_implemented_for
|
4 |
+
|
5 |
+
__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
|
6 |
+
|
7 |
+
|
8 |
+
@nx._dispatchable
|
9 |
+
def degree_centrality(G):
|
10 |
+
"""Compute the degree centrality for nodes.
|
11 |
+
|
12 |
+
The degree centrality for a node v is the fraction of nodes it
|
13 |
+
is connected to.
|
14 |
+
|
15 |
+
Parameters
|
16 |
+
----------
|
17 |
+
G : graph
|
18 |
+
A networkx graph
|
19 |
+
|
20 |
+
Returns
|
21 |
+
-------
|
22 |
+
nodes : dictionary
|
23 |
+
Dictionary of nodes with degree centrality as the value.
|
24 |
+
|
25 |
+
Examples
|
26 |
+
--------
|
27 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
28 |
+
>>> nx.degree_centrality(G)
|
29 |
+
{0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}
|
30 |
+
|
31 |
+
See Also
|
32 |
+
--------
|
33 |
+
betweenness_centrality, load_centrality, eigenvector_centrality
|
34 |
+
|
35 |
+
Notes
|
36 |
+
-----
|
37 |
+
The degree centrality values are normalized by dividing by the maximum
|
38 |
+
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
39 |
+
|
40 |
+
For multigraphs or graphs with self loops the maximum degree might
|
41 |
+
be higher than n-1 and values of degree centrality greater than 1
|
42 |
+
are possible.
|
43 |
+
"""
|
44 |
+
if len(G) <= 1:
|
45 |
+
return {n: 1 for n in G}
|
46 |
+
|
47 |
+
s = 1.0 / (len(G) - 1.0)
|
48 |
+
centrality = {n: d * s for n, d in G.degree()}
|
49 |
+
return centrality
|
50 |
+
|
51 |
+
|
52 |
+
@not_implemented_for("undirected")
|
53 |
+
@nx._dispatchable
|
54 |
+
def in_degree_centrality(G):
|
55 |
+
"""Compute the in-degree centrality for nodes.
|
56 |
+
|
57 |
+
The in-degree centrality for a node v is the fraction of nodes its
|
58 |
+
incoming edges are connected to.
|
59 |
+
|
60 |
+
Parameters
|
61 |
+
----------
|
62 |
+
G : graph
|
63 |
+
A NetworkX graph
|
64 |
+
|
65 |
+
Returns
|
66 |
+
-------
|
67 |
+
nodes : dictionary
|
68 |
+
Dictionary of nodes with in-degree centrality as values.
|
69 |
+
|
70 |
+
Raises
|
71 |
+
------
|
72 |
+
NetworkXNotImplemented
|
73 |
+
If G is undirected.
|
74 |
+
|
75 |
+
Examples
|
76 |
+
--------
|
77 |
+
>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
78 |
+
>>> nx.in_degree_centrality(G)
|
79 |
+
{0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}
|
80 |
+
|
81 |
+
See Also
|
82 |
+
--------
|
83 |
+
degree_centrality, out_degree_centrality
|
84 |
+
|
85 |
+
Notes
|
86 |
+
-----
|
87 |
+
The degree centrality values are normalized by dividing by the maximum
|
88 |
+
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
89 |
+
|
90 |
+
For multigraphs or graphs with self loops the maximum degree might
|
91 |
+
be higher than n-1 and values of degree centrality greater than 1
|
92 |
+
are possible.
|
93 |
+
"""
|
94 |
+
if len(G) <= 1:
|
95 |
+
return {n: 1 for n in G}
|
96 |
+
|
97 |
+
s = 1.0 / (len(G) - 1.0)
|
98 |
+
centrality = {n: d * s for n, d in G.in_degree()}
|
99 |
+
return centrality
|
100 |
+
|
101 |
+
|
102 |
+
@not_implemented_for("undirected")
|
103 |
+
@nx._dispatchable
|
104 |
+
def out_degree_centrality(G):
|
105 |
+
"""Compute the out-degree centrality for nodes.
|
106 |
+
|
107 |
+
The out-degree centrality for a node v is the fraction of nodes its
|
108 |
+
outgoing edges are connected to.
|
109 |
+
|
110 |
+
Parameters
|
111 |
+
----------
|
112 |
+
G : graph
|
113 |
+
A NetworkX graph
|
114 |
+
|
115 |
+
Returns
|
116 |
+
-------
|
117 |
+
nodes : dictionary
|
118 |
+
Dictionary of nodes with out-degree centrality as values.
|
119 |
+
|
120 |
+
Raises
|
121 |
+
------
|
122 |
+
NetworkXNotImplemented
|
123 |
+
If G is undirected.
|
124 |
+
|
125 |
+
Examples
|
126 |
+
--------
|
127 |
+
>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
128 |
+
>>> nx.out_degree_centrality(G)
|
129 |
+
{0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}
|
130 |
+
|
131 |
+
See Also
|
132 |
+
--------
|
133 |
+
degree_centrality, in_degree_centrality
|
134 |
+
|
135 |
+
Notes
|
136 |
+
-----
|
137 |
+
The degree centrality values are normalized by dividing by the maximum
|
138 |
+
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
139 |
+
|
140 |
+
For multigraphs or graphs with self loops the maximum degree might
|
141 |
+
be higher than n-1 and values of degree centrality greater than 1
|
142 |
+
are possible.
|
143 |
+
"""
|
144 |
+
if len(G) <= 1:
|
145 |
+
return {n: 1 for n in G}
|
146 |
+
|
147 |
+
s = 1.0 / (len(G) - 1.0)
|
148 |
+
centrality = {n: d * s for n, d in G.out_degree()}
|
149 |
+
return centrality
|
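A brief sketch (not part of the diff) of the n-1 normalization described in the Notes of the three degree-centrality functions above:

import networkx as nx

G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
n = G.number_of_nodes()
manual = {v: d / (n - 1) for v, d in G.degree()}  # degree / (n - 1)
print(manual)
print(nx.degree_centrality(G))  # matches the manual computation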
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/group.py
ADDED
@@ -0,0 +1,786 @@
1 |
+
"""Group centrality measures."""
|
2 |
+
from copy import deepcopy
|
3 |
+
|
4 |
+
import networkx as nx
|
5 |
+
from networkx.algorithms.centrality.betweenness import (
|
6 |
+
_accumulate_endpoints,
|
7 |
+
_single_source_dijkstra_path_basic,
|
8 |
+
_single_source_shortest_path_basic,
|
9 |
+
)
|
10 |
+
from networkx.utils.decorators import not_implemented_for
|
11 |
+
|
12 |
+
__all__ = [
|
13 |
+
"group_betweenness_centrality",
|
14 |
+
"group_closeness_centrality",
|
15 |
+
"group_degree_centrality",
|
16 |
+
"group_in_degree_centrality",
|
17 |
+
"group_out_degree_centrality",
|
18 |
+
"prominent_group",
|
19 |
+
]
|
20 |
+
|
21 |
+
|
22 |
+
@nx._dispatchable(edge_attrs="weight")
|
23 |
+
def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False):
|
24 |
+
r"""Compute the group betweenness centrality for a group of nodes.
|
25 |
+
|
26 |
+
Group betweenness centrality of a group of nodes $C$ is the sum of the
|
27 |
+
fraction of all-pairs shortest paths that pass through any vertex in $C$
|
28 |
+
|
29 |
+
.. math::
|
30 |
+
|
31 |
+
c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}
|
32 |
+
|
33 |
+
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
34 |
+
shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
|
35 |
+
those paths passing through some node in group $C$. Note that
|
36 |
+
$(s, t)$ are not members of the group ($V-C$ is the set of nodes
|
37 |
+
in $V$ that are not in $C$).
|
38 |
+
|
39 |
+
Parameters
|
40 |
+
----------
|
41 |
+
G : graph
|
42 |
+
A NetworkX graph.
|
43 |
+
|
44 |
+
C : list or set or list of lists or list of sets
|
45 |
+
A group or a list of groups containing nodes which belong to G, for which group betweenness
|
46 |
+
centrality is to be calculated.
|
47 |
+
|
48 |
+
normalized : bool, optional (default=True)
|
49 |
+
If True, group betweenness is normalized by `1/((|V|-|C|)(|V|-|C|-1))`
|
50 |
+
where `|V|` is the number of nodes in G and `|C|` is the number of nodes in C.
|
51 |
+
|
52 |
+
weight : None or string, optional (default=None)
|
53 |
+
If None, all edge weights are considered equal.
|
54 |
+
Otherwise holds the name of the edge attribute used as weight.
|
55 |
+
The weight of an edge is treated as the length or distance between the two sides.
|
56 |
+
|
57 |
+
endpoints : bool, optional (default=False)
|
58 |
+
If True include the endpoints in the shortest path counts.
|
59 |
+
|
60 |
+
Raises
|
61 |
+
------
|
62 |
+
NodeNotFound
|
63 |
+
If node(s) in C are not present in G.
|
64 |
+
|
65 |
+
Returns
|
66 |
+
-------
|
67 |
+
betweenness : list of floats or float
|
68 |
+
If C is a single group then return a float. If C is a list with
|
69 |
+
several groups then return a list of group betweenness centralities.
|
70 |
+
|
71 |
+
See Also
|
72 |
+
--------
|
73 |
+
betweenness_centrality
|
74 |
+
|
75 |
+
Notes
|
76 |
+
-----
|
77 |
+
Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
|
78 |
+
The initial implementation of the algorithm is mentioned in [2]_. This function uses
|
79 |
+
an improved algorithm presented in [4]_.
|
80 |
+
|
81 |
+
The number of nodes in the group must be a maximum of n - 2 where `n`
|
82 |
+
is the total number of nodes in the graph.
|
83 |
+
|
84 |
+
For weighted graphs the edge weights must be greater than zero.
|
85 |
+
Zero edge weights can produce an infinite number of equal length
|
86 |
+
paths between pairs of nodes.
|
87 |
+
|
88 |
+
The total number of paths between source and target is counted
|
89 |
+
differently for directed and undirected graphs. Directed paths
|
90 |
+
between "u" and "v" are counted as two possible paths (one each
|
91 |
+
direction) while undirected paths between "u" and "v" are counted
|
92 |
+
as one path. Said another way, the sum in the expression above is
|
93 |
+
over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.
|
94 |
+
|
95 |
+
|
96 |
+
References
|
97 |
+
----------
|
98 |
+
.. [1] M G Everett and S P Borgatti:
|
99 |
+
The Centrality of Groups and Classes.
|
100 |
+
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
101 |
+
http://www.analytictech.com/borgatti/group_centrality.htm
|
102 |
+
.. [2] Ulrik Brandes:
|
103 |
+
On Variants of Shortest-Path Betweenness
|
104 |
+
Centrality and their Generic Computation.
|
105 |
+
Social Networks 30(2):136-145, 2008.
|
106 |
+
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf
|
107 |
+
.. [3] Sourav Medya et al.:
|
108 |
+
Group Centrality Maximization via Network Design.
|
109 |
+
SIAM International Conference on Data Mining, SDM 2018, 126–134.
|
110 |
+
https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
|
111 |
+
.. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
|
112 |
+
"Fast algorithm for successive computation of group betweenness centrality."
|
113 |
+
https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
|
114 |
+
|
115 |
+
"""
|
116 |
+
GBC = [] # initialize betweenness
|
117 |
+
list_of_groups = True
|
118 |
+
# check whether C contains one or many groups
|
119 |
+
if any(el in G for el in C):
|
120 |
+
C = [C]
|
121 |
+
list_of_groups = False
|
122 |
+
set_v = {node for group in C for node in group}
|
123 |
+
if set_v - G.nodes: # element(s) of C not in G
|
124 |
+
raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are in C but not in G.")
|
125 |
+
|
126 |
+
# pre-processing
|
127 |
+
PB, sigma, D = _group_preprocessing(G, set_v, weight)
|
128 |
+
|
129 |
+
# the algorithm for each group
|
130 |
+
for group in C:
|
131 |
+
group = set(group) # set of nodes in group
|
132 |
+
# initialize the matrices of the sigma and the PB
|
133 |
+
GBC_group = 0
|
134 |
+
sigma_m = deepcopy(sigma)
|
135 |
+
PB_m = deepcopy(PB)
|
136 |
+
sigma_m_v = deepcopy(sigma_m)
|
137 |
+
PB_m_v = deepcopy(PB_m)
|
138 |
+
for v in group:
|
139 |
+
GBC_group += PB_m[v][v]
|
140 |
+
for x in group:
|
141 |
+
for y in group:
|
142 |
+
dxvy = 0
|
143 |
+
dxyv = 0
|
144 |
+
dvxy = 0
|
145 |
+
if not (
|
146 |
+
sigma_m[x][y] == 0 or sigma_m[x][v] == 0 or sigma_m[v][y] == 0
|
147 |
+
):
|
148 |
+
if D[x][v] == D[x][y] + D[y][v]:
|
149 |
+
dxyv = sigma_m[x][y] * sigma_m[y][v] / sigma_m[x][v]
|
150 |
+
if D[x][y] == D[x][v] + D[v][y]:
|
151 |
+
dxvy = sigma_m[x][v] * sigma_m[v][y] / sigma_m[x][y]
|
152 |
+
if D[v][y] == D[v][x] + D[x][y]:
|
153 |
+
dvxy = sigma_m[v][x] * sigma[x][y] / sigma[v][y]
|
154 |
+
sigma_m_v[x][y] = sigma_m[x][y] * (1 - dxvy)
|
155 |
+
PB_m_v[x][y] = PB_m[x][y] - PB_m[x][y] * dxvy
|
156 |
+
if y != v:
|
157 |
+
PB_m_v[x][y] -= PB_m[x][v] * dxyv
|
158 |
+
if x != v:
|
159 |
+
PB_m_v[x][y] -= PB_m[v][y] * dvxy
|
160 |
+
sigma_m, sigma_m_v = sigma_m_v, sigma_m
|
161 |
+
PB_m, PB_m_v = PB_m_v, PB_m
|
162 |
+
|
163 |
+
# endpoints
|
164 |
+
v, c = len(G), len(group)
|
165 |
+
if not endpoints:
|
166 |
+
scale = 0
|
167 |
+
# if the graph is connected then subtract the endpoints from
|
168 |
+
# the count for all the nodes in the graph. else count how many
|
169 |
+
# nodes are connected to the group's nodes and subtract that.
|
170 |
+
if nx.is_directed(G):
|
171 |
+
if nx.is_strongly_connected(G):
|
172 |
+
scale = c * (2 * v - c - 1)
|
173 |
+
elif nx.is_connected(G):
|
174 |
+
scale = c * (2 * v - c - 1)
|
175 |
+
if scale == 0:
|
176 |
+
for group_node1 in group:
|
177 |
+
for node in D[group_node1]:
|
178 |
+
if node != group_node1:
|
179 |
+
if node in group:
|
180 |
+
scale += 1
|
181 |
+
else:
|
182 |
+
scale += 2
|
183 |
+
GBC_group -= scale
|
184 |
+
|
185 |
+
# normalized
|
186 |
+
if normalized:
|
187 |
+
scale = 1 / ((v - c) * (v - c - 1))
|
188 |
+
GBC_group *= scale
|
189 |
+
|
190 |
+
# If undirected then count only the undirected edges
|
191 |
+
elif not G.is_directed():
|
192 |
+
GBC_group /= 2
|
193 |
+
|
194 |
+
GBC.append(GBC_group)
|
195 |
+
if list_of_groups:
|
196 |
+
return GBC
|
197 |
+
return GBC[0]
|
198 |
+
|
199 |
+
|
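A usage sketch (not part of the diff) for group_betweenness_centrality, passing either a single group or a list of groups as the docstring allows:

import networkx as nx

G = nx.path_graph(6)  # 0-1-2-3-4-5
single = nx.group_betweenness_centrality(G, [2, 3])             # one group -> float
several = nx.group_betweenness_centrality(G, [[1, 2], [3, 4]])  # list of groups -> list
print(round(single, 3))
print([round(v, 3) for v in several])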
200 |
+
def _group_preprocessing(G, set_v, weight):
|
201 |
+
sigma = {}
|
202 |
+
delta = {}
|
203 |
+
D = {}
|
204 |
+
betweenness = dict.fromkeys(G, 0)
|
205 |
+
for s in G:
|
206 |
+
if weight is None: # use BFS
|
207 |
+
S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
|
208 |
+
else: # use Dijkstra's algorithm
|
209 |
+
S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
|
210 |
+
betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
|
211 |
+
for i in delta[s]: # add the paths from s to i and rescale sigma
|
212 |
+
if s != i:
|
213 |
+
delta[s][i] += 1
|
214 |
+
if weight is not None:
|
215 |
+
sigma[s][i] = sigma[s][i] / 2
|
216 |
+
# building the path betweenness matrix only for nodes that appear in the group
|
217 |
+
PB = dict.fromkeys(G)
|
218 |
+
for group_node1 in set_v:
|
219 |
+
PB[group_node1] = dict.fromkeys(G, 0.0)
|
220 |
+
for group_node2 in set_v:
|
221 |
+
if group_node2 not in D[group_node1]:
|
222 |
+
continue
|
223 |
+
for node in G:
|
224 |
+
# if node is connected to the two group nodes then continue
|
225 |
+
if group_node2 in D[node] and group_node1 in D[node]:
|
226 |
+
if (
|
227 |
+
D[node][group_node2]
|
228 |
+
== D[node][group_node1] + D[group_node1][group_node2]
|
229 |
+
):
|
230 |
+
PB[group_node1][group_node2] += (
|
231 |
+
delta[node][group_node2]
|
232 |
+
* sigma[node][group_node1]
|
233 |
+
* sigma[group_node1][group_node2]
|
234 |
+
/ sigma[node][group_node2]
|
235 |
+
)
|
236 |
+
return PB, sigma, D
|
237 |
+
|
238 |
+
|
239 |
+
@nx._dispatchable(edge_attrs="weight")
|
240 |
+
def prominent_group(
|
241 |
+
G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False
|
242 |
+
):
|
243 |
+
r"""Find the prominent group of size $k$ in graph $G$. The prominence of the
|
244 |
+
group is evaluated by the group betweenness centrality.
|
245 |
+
|
246 |
+
Group betweenness centrality of a group of nodes $C$ is the sum of the
|
247 |
+
fraction of all-pairs shortest paths that pass through any vertex in $C$
|
248 |
+
|
249 |
+
.. math::
|
250 |
+
|
251 |
+
c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}
|
252 |
+
|
253 |
+
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
254 |
+
shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
|
255 |
+
those paths passing through some node in group $C$. Note that
|
256 |
+
$(s, t)$ are not members of the group ($V-C$ is the set of nodes
|
257 |
+
in $V$ that are not in $C$).
|
258 |
+
|
259 |
+
Parameters
|
260 |
+
----------
|
261 |
+
G : graph
|
262 |
+
A NetworkX graph.
|
263 |
+
|
264 |
+
k : int
|
265 |
+
The number of nodes in the group.
|
266 |
+
|
267 |
+
normalized : bool, optional (default=True)
|
268 |
+
If True, group betweenness is normalized by ``1/((|V|-|C|)(|V|-|C|-1))``
|
269 |
+
where ``|V|`` is the number of nodes in G and ``|C|`` is the number of
|
270 |
+
nodes in C.
|
271 |
+
|
272 |
+
weight : None or string, optional (default=None)
|
273 |
+
If None, all edge weights are considered equal.
|
274 |
+
Otherwise holds the name of the edge attribute used as weight.
|
275 |
+
The weight of an edge is treated as the length or distance between the two sides.
|
276 |
+
|
277 |
+
endpoints : bool, optional (default=False)
|
278 |
+
If True include the endpoints in the shortest path counts.
|
279 |
+
|
280 |
+
C : list or set, optional (default=None)
|
281 |
+
list of nodes which won't be candidates of the prominent group.
|
282 |
+
|
283 |
+
greedy : bool, optional (default=False)
|
284 |
+
If True, use a naive greedy algorithm to find a (possibly non-optimal) prominent
|
285 |
+
group. For scale-free networks the results are negligibly below the optimal
|
286 |
+
results.
|
287 |
+
|
288 |
+
Raises
|
289 |
+
------
|
290 |
+
NodeNotFound
|
291 |
+
If node(s) in C are not present in G.
|
292 |
+
|
293 |
+
Returns
|
294 |
+
-------
|
295 |
+
max_GBC : float
|
296 |
+
The group betweenness centrality of the prominent group.
|
297 |
+
|
298 |
+
max_group : list
|
299 |
+
The list of nodes in the prominent group.
|
300 |
+
|
301 |
+
See Also
|
302 |
+
--------
|
303 |
+
betweenness_centrality, group_betweenness_centrality
|
304 |
+
|
305 |
+
Notes
|
306 |
+
-----
|
307 |
+
Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
|
308 |
+
The algorithm is described in [2]_ and is based on techniques mentioned in [4]_.
|
309 |
+
|
310 |
+
The number of nodes in the group must be a maximum of ``n - 2`` where ``n``
|
311 |
+
is the total number of nodes in the graph.
|
312 |
+
|
313 |
+
For weighted graphs the edge weights must be greater than zero.
|
314 |
+
Zero edge weights can produce an infinite number of equal length
|
315 |
+
paths between pairs of nodes.
|
316 |
+
|
317 |
+
The total number of paths between source and target is counted
|
318 |
+
differently for directed and undirected graphs. Directed paths
|
319 |
+
between "u" and "v" are counted as two possible paths (one each
|
320 |
+
direction) while undirected paths between "u" and "v" are counted
|
321 |
+
as one path. Said another way, the sum in the expression above is
|
322 |
+
over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.
|
323 |
+
|
324 |
+
References
|
325 |
+
----------
|
326 |
+
.. [1] M G Everett and S P Borgatti:
|
327 |
+
The Centrality of Groups and Classes.
|
328 |
+
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
329 |
+
http://www.analytictech.com/borgatti/group_centrality.htm
|
330 |
+
.. [2] Rami Puzis, Yuval Elovici, and Shlomi Dolev:
|
331 |
+
"Finding the Most Prominent Group in Complex Networks"
|
332 |
+
AI communications 20(4): 287-296, 2007.
|
333 |
+
https://www.researchgate.net/profile/Rami_Puzis2/publication/220308855
|
334 |
+
.. [3] Sourav Medya et. al.:
|
335 |
+
Group Centrality Maximization via Network Design.
|
336 |
+
SIAM International Conference on Data Mining, SDM 2018, 126–134.
|
337 |
+
https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
|
338 |
+
.. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
|
339 |
+
"Fast algorithm for successive computation of group betweenness centrality."
|
340 |
+
https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
|
341 |
+
"""
|
342 |
+
import numpy as np
|
343 |
+
import pandas as pd
|
344 |
+
|
345 |
+
if C is not None:
|
346 |
+
C = set(C)
|
347 |
+
if C - G.nodes: # element(s) of C not in G
|
348 |
+
raise nx.NodeNotFound(f"The node(s) {C - G.nodes} are in C but not in G.")
|
349 |
+
nodes = list(G.nodes - C)
|
350 |
+
else:
|
351 |
+
nodes = list(G.nodes)
|
352 |
+
DF_tree = nx.Graph()
|
353 |
+
DF_tree.__networkx_cache__ = None # Disable caching
|
354 |
+
PB, sigma, D = _group_preprocessing(G, nodes, weight)
|
355 |
+
betweenness = pd.DataFrame.from_dict(PB)
|
356 |
+
if C is not None:
|
357 |
+
for node in C:
|
358 |
+
# remove from the betweenness all the nodes not part of the group
|
359 |
+
betweenness.drop(index=node, inplace=True)
|
360 |
+
betweenness.drop(columns=node, inplace=True)
|
361 |
+
CL = [node for _, node in sorted(zip(np.diag(betweenness), nodes), reverse=True)]
|
362 |
+
max_GBC = 0
|
363 |
+
max_group = []
|
364 |
+
DF_tree.add_node(
|
365 |
+
1,
|
366 |
+
CL=CL,
|
367 |
+
betweenness=betweenness,
|
368 |
+
GBC=0,
|
369 |
+
GM=[],
|
370 |
+
sigma=sigma,
|
371 |
+
cont=dict(zip(nodes, np.diag(betweenness))),
|
372 |
+
)
|
373 |
+
|
374 |
+
# the algorithm
|
375 |
+
DF_tree.nodes[1]["heu"] = 0
|
376 |
+
for i in range(k):
|
377 |
+
DF_tree.nodes[1]["heu"] += DF_tree.nodes[1]["cont"][DF_tree.nodes[1]["CL"][i]]
|
378 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
379 |
+
G, k, DF_tree, max_GBC, 1, D, max_group, nodes, greedy
|
380 |
+
)
|
381 |
+
|
382 |
+
v = len(G)
|
383 |
+
if not endpoints:
|
384 |
+
scale = 0
|
385 |
+
# if the graph is connected then subtract the endpoints from
|
386 |
+
# the count for all the nodes in the graph. else count how many
|
387 |
+
# nodes are connected to the group's nodes and subtract that.
|
388 |
+
if nx.is_directed(G):
|
389 |
+
if nx.is_strongly_connected(G):
|
390 |
+
scale = k * (2 * v - k - 1)
|
391 |
+
elif nx.is_connected(G):
|
392 |
+
scale = k * (2 * v - k - 1)
|
393 |
+
if scale == 0:
|
394 |
+
for group_node1 in max_group:
|
395 |
+
for node in D[group_node1]:
|
396 |
+
if node != group_node1:
|
397 |
+
if node in max_group:
|
398 |
+
scale += 1
|
399 |
+
else:
|
400 |
+
scale += 2
|
401 |
+
max_GBC -= scale
|
402 |
+
|
403 |
+
# normalized
|
404 |
+
if normalized:
|
405 |
+
scale = 1 / ((v - k) * (v - k - 1))
|
406 |
+
max_GBC *= scale
|
407 |
+
|
408 |
+
# If undirected then count only the undirected edges
|
409 |
+
elif not G.is_directed():
|
410 |
+
max_GBC /= 2
|
411 |
+
max_GBC = float("%.2f" % max_GBC)
|
412 |
+
return max_GBC, max_group
|
413 |
+
|
414 |
+
|
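A sketch (not part of the diff) of how prominent_group might be called; note that it needs pandas as well as NumPy, as the imports inside the function show:

import networkx as nx

G = nx.karate_club_graph()
# Search for the 3-node group with the highest group betweenness; with
# greedy=True the branch-and-bound search is replaced by a cheaper greedy pass.
max_gbc, members = nx.prominent_group(G, k=3, greedy=True)
print(max_gbc, sorted(members))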
415 |
+
def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
|
416 |
+
# stopping condition - if we found a group of size k and with higher GBC then prune
|
417 |
+
if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
|
418 |
+
return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
|
419 |
+
# stopping condition - if the size of group members equal to k or there are less than
|
420 |
+
# k - |GM| in the candidate list or the heuristic function plus the GBC is below the
|
421 |
+
# maximal GBC found then prune
|
422 |
+
if (
|
423 |
+
len(DF_tree.nodes[root]["GM"]) == k
|
424 |
+
or len(DF_tree.nodes[root]["CL"]) <= k - len(DF_tree.nodes[root]["GM"])
|
425 |
+
or DF_tree.nodes[root]["GBC"] + DF_tree.nodes[root]["heu"] <= max_GBC
|
426 |
+
):
|
427 |
+
return max_GBC, DF_tree, max_group
|
428 |
+
|
429 |
+
# finding the heuristic of both children
|
430 |
+
node_p, node_m, DF_tree = _heuristic(k, root, DF_tree, D, nodes, greedy)
|
431 |
+
|
432 |
+
# find the child with the larger heuristic + GBC and expand that
|
433 |
+
# node first; if greedy, only expand the plus node
|
434 |
+
if greedy:
|
435 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
436 |
+
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
437 |
+
)
|
438 |
+
|
439 |
+
elif (
|
440 |
+
DF_tree.nodes[node_p]["GBC"] + DF_tree.nodes[node_p]["heu"]
|
441 |
+
> DF_tree.nodes[node_m]["GBC"] + DF_tree.nodes[node_m]["heu"]
|
442 |
+
):
|
443 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
444 |
+
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
445 |
+
)
|
446 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
447 |
+
G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
|
448 |
+
)
|
449 |
+
else:
|
450 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
451 |
+
G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
|
452 |
+
)
|
453 |
+
max_GBC, DF_tree, max_group = _dfbnb(
|
454 |
+
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
455 |
+
)
|
456 |
+
return max_GBC, DF_tree, max_group
|
457 |
+
|
458 |
+
|
459 |
+
def _heuristic(k, root, DF_tree, D, nodes, greedy):
|
460 |
+
import numpy as np
|
461 |
+
|
462 |
+
# This helper function adds two nodes to DF_tree - one left son and the
|
463 |
+
# other right son, finds their heuristic, CL, GBC, and GM
|
464 |
+
node_p = DF_tree.number_of_nodes() + 1
|
465 |
+
node_m = DF_tree.number_of_nodes() + 2
|
466 |
+
added_node = DF_tree.nodes[root]["CL"][0]
|
467 |
+
|
468 |
+
# adding the plus node
|
469 |
+
DF_tree.add_nodes_from([(node_p, deepcopy(DF_tree.nodes[root]))])
|
470 |
+
DF_tree.nodes[node_p]["GM"].append(added_node)
|
471 |
+
DF_tree.nodes[node_p]["GBC"] += DF_tree.nodes[node_p]["cont"][added_node]
|
472 |
+
root_node = DF_tree.nodes[root]
|
473 |
+
for x in nodes:
|
474 |
+
for y in nodes:
|
475 |
+
dxvy = 0
|
476 |
+
dxyv = 0
|
477 |
+
dvxy = 0
|
478 |
+
if not (
|
479 |
+
root_node["sigma"][x][y] == 0
|
480 |
+
or root_node["sigma"][x][added_node] == 0
|
481 |
+
or root_node["sigma"][added_node][y] == 0
|
482 |
+
):
|
483 |
+
if D[x][added_node] == D[x][y] + D[y][added_node]:
|
484 |
+
dxyv = (
|
485 |
+
root_node["sigma"][x][y]
|
486 |
+
* root_node["sigma"][y][added_node]
|
487 |
+
/ root_node["sigma"][x][added_node]
|
488 |
+
)
|
489 |
+
if D[x][y] == D[x][added_node] + D[added_node][y]:
|
490 |
+
dxvy = (
|
491 |
+
root_node["sigma"][x][added_node]
|
492 |
+
* root_node["sigma"][added_node][y]
|
493 |
+
/ root_node["sigma"][x][y]
|
494 |
+
)
|
495 |
+
if D[added_node][y] == D[added_node][x] + D[x][y]:
|
496 |
+
dvxy = (
|
497 |
+
root_node["sigma"][added_node][x]
|
498 |
+
* root_node["sigma"][x][y]
|
499 |
+
/ root_node["sigma"][added_node][y]
|
500 |
+
)
|
501 |
+
DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy)
|
502 |
+
DF_tree.nodes[node_p]["betweenness"].loc[y, x] = (
|
503 |
+
root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy
|
504 |
+
)
|
505 |
+
if y != added_node:
|
506 |
+
DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
|
507 |
+
root_node["betweenness"][x][added_node] * dxyv
|
508 |
+
)
|
509 |
+
if x != added_node:
|
510 |
+
DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
|
511 |
+
root_node["betweenness"][added_node][y] * dvxy
|
512 |
+
)
|
513 |
+
|
514 |
+
DF_tree.nodes[node_p]["CL"] = [
|
515 |
+
node
|
516 |
+
for _, node in sorted(
|
517 |
+
zip(np.diag(DF_tree.nodes[node_p]["betweenness"]), nodes), reverse=True
|
518 |
+
)
|
519 |
+
if node not in DF_tree.nodes[node_p]["GM"]
|
520 |
+
]
|
521 |
+
DF_tree.nodes[node_p]["cont"] = dict(
|
522 |
+
zip(nodes, np.diag(DF_tree.nodes[node_p]["betweenness"]))
|
523 |
+
)
|
524 |
+
DF_tree.nodes[node_p]["heu"] = 0
|
525 |
+
for i in range(k - len(DF_tree.nodes[node_p]["GM"])):
|
526 |
+
DF_tree.nodes[node_p]["heu"] += DF_tree.nodes[node_p]["cont"][
|
527 |
+
DF_tree.nodes[node_p]["CL"][i]
|
528 |
+
]
|
529 |
+
|
530 |
+
# adding the minus node - don't insert the first node in the CL to GM
|
531 |
+
# Insert the minus node only when not running the greedy variant
|
532 |
+
if not greedy:
|
533 |
+
DF_tree.add_nodes_from([(node_m, deepcopy(DF_tree.nodes[root]))])
|
534 |
+
DF_tree.nodes[node_m]["CL"].pop(0)
|
535 |
+
DF_tree.nodes[node_m]["cont"].pop(added_node)
|
536 |
+
DF_tree.nodes[node_m]["heu"] = 0
|
537 |
+
for i in range(k - len(DF_tree.nodes[node_m]["GM"])):
|
538 |
+
DF_tree.nodes[node_m]["heu"] += DF_tree.nodes[node_m]["cont"][
|
539 |
+
DF_tree.nodes[node_m]["CL"][i]
|
540 |
+
]
|
541 |
+
else:
|
542 |
+
node_m = None
|
543 |
+
|
544 |
+
return node_p, node_m, DF_tree
|
545 |
+
|
546 |
+
|
547 |
+
@nx._dispatchable(edge_attrs="weight")
|
548 |
+
def group_closeness_centrality(G, S, weight=None):
|
549 |
+
r"""Compute the group closeness centrality for a group of nodes.
|
550 |
+
|
551 |
+
Group closeness centrality of a group of nodes $S$ is a measure
|
552 |
+
of how close the group is to the other nodes in the graph.
|
553 |
+
|
554 |
+
.. math::
|
555 |
+
|
556 |
+
c_{close}(S) = \frac{|V-S|}{\sum_{v \in V-S} d_{S, v}}
|
557 |
+
|
558 |
+
d_{S, v} = min_{u \in S} (d_{u, v})
|
559 |
+
|
560 |
+
where $V$ is the set of nodes, $d_{S, v}$ is the distance of
|
561 |
+
the group $S$ from $v$ defined as above. ($V-S$ is the set of nodes
|
562 |
+
in $V$ that are not in $S$).
|
563 |
+
|
564 |
+
Parameters
|
565 |
+
----------
|
566 |
+
G : graph
|
567 |
+
A NetworkX graph.
|
568 |
+
|
569 |
+
S : list or set
|
570 |
+
S is a group of nodes which belong to G, for which group closeness
|
571 |
+
centrality is to be calculated.
|
572 |
+
|
573 |
+
weight : None or string, optional (default=None)
|
574 |
+
If None, all edge weights are considered equal.
|
575 |
+
Otherwise holds the name of the edge attribute used as weight.
|
576 |
+
The weight of an edge is treated as the length or distance between the two sides.
|
577 |
+
|
578 |
+
Raises
|
579 |
+
------
|
580 |
+
NodeNotFound
|
581 |
+
If node(s) in S are not present in G.
|
582 |
+
|
583 |
+
Returns
|
584 |
+
-------
|
585 |
+
closeness : float
|
586 |
+
Group closeness centrality of the group S.
|
587 |
+
|
588 |
+
See Also
|
589 |
+
--------
|
590 |
+
closeness_centrality
|
591 |
+
|
592 |
+
Notes
|
593 |
+
-----
|
594 |
+
The measure was introduced in [1]_.
|
595 |
+
The formula implemented here is described in [2]_.
|
596 |
+
|
597 |
+
Higher values of closeness indicate greater centrality.
|
598 |
+
|
599 |
+
It is assumed that 1 / 0 is 0 (required in the case of directed graphs,
|
600 |
+
or when a shortest path length is 0).
|
601 |
+
|
602 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
603 |
+
is the total number of nodes in the graph.
|
604 |
+
|
605 |
+
For directed graphs, the incoming distance is utilized here. To use the
|
606 |
+
outward distance, act on `G.reverse()`.
|
607 |
+
|
608 |
+
For weighted graphs the edge weights must be greater than zero.
|
609 |
+
Zero edge weights can produce an infinite number of equal length
|
610 |
+
paths between pairs of nodes.
|
611 |
+
|
612 |
+
References
|
613 |
+
----------
|
614 |
+
.. [1] M G Everett and S P Borgatti:
|
615 |
+
The Centrality of Groups and Classes.
|
616 |
+
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
617 |
+
http://www.analytictech.com/borgatti/group_centrality.htm
|
618 |
+
.. [2] J. Zhao et al.:
|
619 |
+
Measuring and Maximizing Group Closeness Centrality over
|
620 |
+
Disk Resident Graphs.
|
621 |
+
WWW Conference Proceedings, 2014. 689-694.
|
622 |
+
https://doi.org/10.1145/2567948.2579356
|
623 |
+
"""
|
624 |
+
if G.is_directed():
|
625 |
+
G = G.reverse() # reverse view
|
626 |
+
closeness = 0 # initialize to 0
|
627 |
+
V = set(G) # set of nodes in G
|
628 |
+
S = set(S) # set of nodes in group S
|
629 |
+
V_S = V - S # set of nodes in V but not S
|
630 |
+
shortest_path_lengths = nx.multi_source_dijkstra_path_length(G, S, weight=weight)
|
631 |
+
# accumulation
|
632 |
+
for v in V_S:
|
633 |
+
try:
|
634 |
+
closeness += shortest_path_lengths[v]
|
635 |
+
except KeyError: # no path exists
|
636 |
+
closeness += 0
|
637 |
+
try:
|
638 |
+
closeness = len(V_S) / closeness
|
639 |
+
except ZeroDivisionError: # 1 / 0 assumed as 0
|
640 |
+
closeness = 0
|
641 |
+
return closeness
|
642 |
+
|
643 |
+
|
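A usage sketch (not part of the diff) for group closeness centrality on an undirected graph:

import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
# Every node outside the group {1, 3} is at distance 1 from it, so group
# closeness is |V-S| / sum of distances = 3 / 3 = 1.0.
print(nx.group_closeness_centrality(G, [1, 3]))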
644 |
+
@nx._dispatchable
|
645 |
+
def group_degree_centrality(G, S):
|
646 |
+
"""Compute the group degree centrality for a group of nodes.
|
647 |
+
|
648 |
+
Group degree centrality of a group of nodes $S$ is the fraction
|
649 |
+
of non-group members connected to group members.
|
650 |
+
|
651 |
+
Parameters
|
652 |
+
----------
|
653 |
+
G : graph
|
654 |
+
A NetworkX graph.
|
655 |
+
|
656 |
+
S : list or set
|
657 |
+
S is a group of nodes which belong to G, for which group degree
|
658 |
+
centrality is to be calculated.
|
659 |
+
|
660 |
+
Raises
|
661 |
+
------
|
662 |
+
NetworkXError
|
663 |
+
If node(s) in S are not in G.
|
664 |
+
|
665 |
+
Returns
|
666 |
+
-------
|
667 |
+
centrality : float
|
668 |
+
Group degree centrality of the group S.
|
669 |
+
|
670 |
+
See Also
|
671 |
+
--------
|
672 |
+
degree_centrality
|
673 |
+
group_in_degree_centrality
|
674 |
+
group_out_degree_centrality
|
675 |
+
|
676 |
+
Notes
|
677 |
+
-----
|
678 |
+
The measure was introduced in [1]_.
|
679 |
+
|
680 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
681 |
+
is the total number of nodes in the graph.
|
682 |
+
|
683 |
+
References
|
684 |
+
----------
|
685 |
+
.. [1] M G Everett and S P Borgatti:
|
686 |
+
The Centrality of Groups and Classes.
|
687 |
+
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
688 |
+
http://www.analytictech.com/borgatti/group_centrality.htm
|
689 |
+
"""
|
690 |
+
centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
|
691 |
+
centrality /= len(G.nodes()) - len(S)
|
692 |
+
return centrality
|
693 |
+
|
694 |
+
|
695 |
+
@not_implemented_for("undirected")
|
696 |
+
@nx._dispatchable
|
697 |
+
def group_in_degree_centrality(G, S):
|
698 |
+
"""Compute the group in-degree centrality for a group of nodes.
|
699 |
+
|
700 |
+
Group in-degree centrality of a group of nodes $S$ is the fraction
|
701 |
+
of non-group members connected to group members by incoming edges.
|
702 |
+
|
703 |
+
Parameters
|
704 |
+
----------
|
705 |
+
G : graph
|
706 |
+
A NetworkX graph.
|
707 |
+
|
708 |
+
S : list or set
|
709 |
+
S is a group of nodes which belong to G, for which group in-degree
|
710 |
+
centrality is to be calculated.
|
711 |
+
|
712 |
+
Returns
|
713 |
+
-------
|
714 |
+
centrality : float
|
715 |
+
Group in-degree centrality of the group S.
|
716 |
+
|
717 |
+
Raises
|
718 |
+
------
|
719 |
+
NetworkXNotImplemented
|
720 |
+
If G is undirected.
|
721 |
+
|
722 |
+
NodeNotFound
|
723 |
+
If node(s) in S are not in G.
|
724 |
+
|
725 |
+
See Also
|
726 |
+
--------
|
727 |
+
degree_centrality
|
728 |
+
group_degree_centrality
|
729 |
+
group_out_degree_centrality
|
730 |
+
|
731 |
+
Notes
|
732 |
+
-----
|
733 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
734 |
+
is the total number of nodes in the graph.
|
735 |
+
|
736 |
+
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
737 |
+
so for group in-degree centrality, the reverse graph is used.
|
738 |
+
"""
|
739 |
+
return group_degree_centrality(G.reverse(), S)
|
740 |
+
|
741 |
+
|
742 |
+
@not_implemented_for("undirected")
|
743 |
+
@nx._dispatchable
|
744 |
+
def group_out_degree_centrality(G, S):
|
745 |
+
"""Compute the group out-degree centrality for a group of nodes.
|
746 |
+
|
747 |
+
Group out-degree centrality of a group of nodes $S$ is the fraction
|
748 |
+
of non-group members connected to group members by outgoing edges.
|
749 |
+
|
750 |
+
Parameters
|
751 |
+
----------
|
752 |
+
G : graph
|
753 |
+
A NetworkX graph.
|
754 |
+
|
755 |
+
S : list or set
|
756 |
+
S is a group of nodes which belong to G, for which group in-degree
|
757 |
+
centrality is to be calculated.
|
758 |
+
|
759 |
+
Returns
|
760 |
+
-------
|
761 |
+
centrality : float
|
762 |
+
Group out-degree centrality of the group S.
|
763 |
+
|
764 |
+
Raises
|
765 |
+
------
|
766 |
+
NetworkXNotImplemented
|
767 |
+
If G is undirected.
|
768 |
+
|
769 |
+
NodeNotFound
|
770 |
+
If node(s) in S are not in G.
|
771 |
+
|
772 |
+
See Also
|
773 |
+
--------
|
774 |
+
degree_centrality
|
775 |
+
group_degree_centrality
|
776 |
+
group_in_degree_centrality
|
777 |
+
|
778 |
+
Notes
|
779 |
+
-----
|
780 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
781 |
+
is the total number of nodes in the graph.
|
782 |
+
|
783 |
+
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
784 |
+
so for group out-degree centrality, the graph itself is used.
|
785 |
+
"""
|
786 |
+
return group_degree_centrality(G, S)
|
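A closing sketch (not part of the diff) for the group degree family; the directed variants simply dispatch to group_degree_centrality on G.reverse() or on G itself, as their docstrings note:

import networkx as nx

D = nx.DiGraph([(0, 1), (0, 2), (1, 2), (3, 1)])
S = [1, 2]
print(nx.group_degree_centrality(D.to_undirected(), S))
print(nx.group_in_degree_centrality(D, S))   # non-members with an edge into S
print(nx.group_out_degree_centrality(D, S))  # non-members reached by edges from S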
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/katz.py
ADDED
@@ -0,0 +1,330 @@
1 |
+
"""Katz centrality."""
|
2 |
+
import math
|
3 |
+
|
4 |
+
import networkx as nx
|
5 |
+
from networkx.utils import not_implemented_for
|
6 |
+
|
7 |
+
__all__ = ["katz_centrality", "katz_centrality_numpy"]
|
8 |
+
|
9 |
+
|
10 |
+
@not_implemented_for("multigraph")
|
11 |
+
@nx._dispatchable(edge_attrs="weight")
|
12 |
+
def katz_centrality(
|
13 |
+
G,
|
14 |
+
alpha=0.1,
|
15 |
+
beta=1.0,
|
16 |
+
max_iter=1000,
|
17 |
+
tol=1.0e-6,
|
18 |
+
nstart=None,
|
19 |
+
normalized=True,
|
20 |
+
weight=None,
|
21 |
+
):
|
22 |
+
r"""Compute the Katz centrality for the nodes of the graph G.
|
23 |
+
|
24 |
+
Katz centrality computes the centrality for a node based on the centrality
|
25 |
+
of its neighbors. It is a generalization of the eigenvector centrality. The
|
26 |
+
Katz centrality for node $i$ is
|
27 |
+
|
28 |
+
.. math::
|
29 |
+
|
30 |
+
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
|
31 |
+
|
32 |
+
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
|
33 |
+
|
34 |
+
The parameter $\beta$ controls the initial centrality and
|
35 |
+
|
36 |
+
.. math::
|
37 |
+
|
38 |
+
\alpha < \frac{1}{\lambda_{\max}}.
|
39 |
+
|
40 |
+
Katz centrality computes the relative influence of a node within a
|
41 |
+
network by measuring the number of the immediate neighbors (first
|
42 |
+
degree nodes) and also all other nodes in the network that connect
|
43 |
+
to the node under consideration through these immediate neighbors.
|
44 |
+
|
45 |
+
Extra weight can be provided to immediate neighbors through the
|
46 |
+
parameter $\beta$. Connections made with distant neighbors
|
47 |
+
are, however, penalized by an attenuation factor $\alpha$ which
|
48 |
+
should be strictly less than the inverse largest eigenvalue of the
|
49 |
+
adjacency matrix in order for the Katz centrality to be computed
|
50 |
+
correctly. More information is provided in [1]_.
|
51 |
+
|
52 |
+
Parameters
|
53 |
+
----------
|
54 |
+
G : graph
|
55 |
+
A NetworkX graph.
|
56 |
+
|
57 |
+
alpha : float, optional (default=0.1)
|
58 |
+
Attenuation factor
|
59 |
+
|
60 |
+
beta : scalar or dictionary, optional (default=1.0)
|
61 |
+
Weight attributed to the immediate neighborhood. If not a scalar, the
|
62 |
+
dictionary must have a value for every node.
|
63 |
+
|
64 |
+
max_iter : integer, optional (default=1000)
|
65 |
+
Maximum number of iterations in power method.
|
66 |
+
|
67 |
+
tol : float, optional (default=1.0e-6)
|
68 |
+
Error tolerance used to check convergence in power method iteration.
|
69 |
+
|
70 |
+
nstart : dictionary, optional
|
71 |
+
Starting value of Katz iteration for each node.
|
72 |
+
|
73 |
+
normalized : bool, optional (default=True)
|
74 |
+
If True normalize the resulting values.
|
75 |
+
|
76 |
+
weight : None or string, optional (default=None)
|
77 |
+
If None, all edge weights are considered equal.
|
78 |
+
Otherwise holds the name of the edge attribute used as weight.
|
79 |
+
In this measure the weight is interpreted as the connection strength.
|
80 |
+
|
81 |
+
Returns
|
82 |
+
-------
|
83 |
+
nodes : dictionary
|
84 |
+
Dictionary of nodes with Katz centrality as the value.
|
85 |
+
|
86 |
+
Raises
|
87 |
+
------
|
88 |
+
NetworkXError
|
89 |
+
If the parameter `beta` is not a scalar but lacks a value for at least
|
90 |
+
one node
|
91 |
+
|
92 |
+
PowerIterationFailedConvergence
|
93 |
+
If the algorithm fails to converge to the specified tolerance
|
94 |
+
within the specified number of iterations of the power iteration
|
95 |
+
method.
|
96 |
+
|
97 |
+
Examples
|
98 |
+
--------
|
99 |
+
>>> import math
|
100 |
+
>>> G = nx.path_graph(4)
|
101 |
+
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
|
102 |
+
>>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
|
103 |
+
>>> for n, c in sorted(centrality.items()):
|
104 |
+
... print(f"{n} {c:.2f}")
|
105 |
+
0 0.37
|
106 |
+
1 0.60
|
107 |
+
2 0.60
|
108 |
+
3 0.37
|
109 |
+
|
110 |
+
See Also
|
111 |
+
--------
|
112 |
+
katz_centrality_numpy
|
113 |
+
eigenvector_centrality
|
114 |
+
eigenvector_centrality_numpy
|
115 |
+
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
116 |
+
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
117 |
+
|
118 |
+
Notes
|
119 |
+
-----
|
120 |
+
Katz centrality was introduced by [2]_.
|
121 |
+
|
122 |
+
This algorithm uses the power method to find the eigenvector
|
123 |
+
corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
|
124 |
+
The parameter ``alpha`` should be strictly less than the inverse of largest
|
125 |
+
eigenvalue of the adjacency matrix for the algorithm to converge.
|
126 |
+
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
|
127 |
+
eigenvalue of the adjacency matrix.
|
128 |
+
The iteration will stop after ``max_iter`` iterations or an error tolerance of
|
129 |
+
``number_of_nodes(G) * tol`` has been reached.
|
130 |
+
|
131 |
+
For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
|
132 |
+
Katz centrality approaches the results for eigenvector centrality.
|
133 |
+
|
134 |
+
For directed graphs this finds "left" eigenvectors which corresponds
|
135 |
+
to the in-edges in the graph. For out-edges Katz centrality,
|
136 |
+
first reverse the graph with ``G.reverse()``.
|
137 |
+
|
138 |
+
References
|
139 |
+
----------
|
140 |
+
.. [1] Mark E. J. Newman:
|
141 |
+
Networks: An Introduction.
|
142 |
+
Oxford University Press, USA, 2010, p. 720.
|
143 |
+
.. [2] Leo Katz:
|
144 |
+
A New Status Index Derived from Sociometric Index.
|
145 |
+
Psychometrika 18(1):39–43, 1953
|
146 |
+
https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
|
147 |
+
"""
    if len(G) == 0:
        return {}

    nnodes = G.number_of_nodes()

    if nstart is None:
        # choose starting vector with entries of 0
        x = {n: 0 for n in G}
    else:
        x = nstart

    try:
        b = dict.fromkeys(G, float(beta))
    except (TypeError, ValueError, AttributeError) as err:
        b = beta
        if set(beta) != set(G):
            raise nx.NetworkXError(
                "beta dictionary must have a value for every node"
            ) from err

    # make up to max_iter iterations
    for _ in range(max_iter):
        xlast = x
        x = dict.fromkeys(xlast, 0)
        # do the multiplication y^T = Alpha * x^T A + Beta
        for n in x:
            for nbr in G[n]:
                x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
        for n in x:
            x[n] = alpha * x[n] + b[n]

        # check convergence
        error = sum(abs(x[n] - xlast[n]) for n in x)
        if error < nnodes * tol:
            if normalized:
                # normalize vector
                try:
                    s = 1.0 / math.hypot(*x.values())
                except ZeroDivisionError:
                    s = 1.0
            else:
                s = 1
            for n in x:
                x[n] *= s
            return x
    raise nx.PowerIterationFailedConvergence(max_iter)
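For a quick, illustrative check of the convergence condition described in the Notes (``alpha`` strictly below the inverse of the largest adjacency eigenvalue), one can compute the spectrum first; a minimal sketch, assuming NumPy is available for ``nx.adjacency_spectrum``:

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> lambda_max = max(abs(ev) for ev in nx.adjacency_spectrum(G))
>>> alpha = 0.9 / lambda_max  # strictly less than 1 / lambda_max, so the power iteration converges
>>> centrality = nx.katz_centrality(G, alpha=alpha)
>>> all(c > 0 for c in centrality.values())
True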


@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
    r"""Compute the Katz centrality for the graph G.

    Katz centrality computes the centrality for a node based on the centrality
    of its neighbors. It is a generalization of the eigenvector centrality. The
    Katz centrality for node $i$ is

    .. math::

        x_i = \alpha \sum_{j} A_{ij} x_j + \beta,

    where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.

    The parameter $\beta$ controls the initial centrality and

    .. math::

        \alpha < \frac{1}{\lambda_{\max}}.

    Katz centrality computes the relative influence of a node within a
    network by measuring the number of the immediate neighbors (first
    degree nodes) and also all other nodes in the network that connect
    to the node under consideration through these immediate neighbors.

    Extra weight can be provided to immediate neighbors through the
    parameter $\beta$.  Connections made with distant neighbors
    are, however, penalized by an attenuation factor $\alpha$ which
    should be strictly less than the inverse largest eigenvalue of the
    adjacency matrix in order for the Katz centrality to be computed
    correctly. More information is provided in [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    alpha : float
      Attenuation factor

    beta : scalar or dictionary, optional (default=1.0)
      Weight attributed to the immediate neighborhood. If not a scalar, the
      dictionary must have a value for every node.

    normalized : bool
      If True normalize the resulting values.

    weight : None or string, optional
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      In this measure the weight is interpreted as the connection strength.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with Katz centrality as the value.

    Raises
    ------
    NetworkXError
       If the parameter `beta` is not a scalar but lacks a value for at least
       one node

    Examples
    --------
    >>> import math
    >>> G = nx.path_graph(4)
    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
    >>> centrality = nx.katz_centrality_numpy(G, 1 / phi)
    >>> for n, c in sorted(centrality.items()):
    ...     print(f"{n} {c:.2f}")
    0 0.37
    1 0.60
    2 0.60
    3 0.37

    See Also
    --------
    katz_centrality
    eigenvector_centrality_numpy
    eigenvector_centrality
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    Katz centrality was introduced by [2]_.

    This algorithm uses a direct linear solver to solve the above equation.
    The parameter ``alpha`` should be strictly less than the inverse of the
    largest eigenvalue of the adjacency matrix for there to be a solution.
    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$, the
    largest eigenvalue of the adjacency matrix.

    For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
    Katz centrality approaches the results for eigenvector centrality.

    For directed graphs this finds "left" eigenvectors which correspond
    to the in-edges in the graph. For out-edges Katz centrality,
    first reverse the graph with ``G.reverse()``.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Networks: An Introduction.
       Oxford University Press, USA, 2010, p. 173.
    .. [2] Leo Katz:
       A New Status Index Derived from Sociometric Index.
       Psychometrika 18(1):39–43, 1953
       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
    """
    import numpy as np

    if len(G) == 0:
        return {}
    try:
        nodelist = beta.keys()
        if set(nodelist) != set(G):
            raise nx.NetworkXError("beta dictionary must have a value for every node")
        b = np.array(list(beta.values()), dtype=float)
    except AttributeError:
        nodelist = list(G)
        try:
            b = np.ones((len(nodelist), 1)) * beta
        except (TypeError, ValueError, AttributeError) as err:
            raise nx.NetworkXError("beta must be a number") from err

    A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
    n = A.shape[0]
    centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze()

    # Normalize: rely on truediv to cast to float, then tolist to make Python numbers
    norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
    return dict(zip(nodelist, (centrality / norm).tolist()))
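The function above amounts to solving the linear system $(I - \alpha A^T) x = \beta \mathbf{1}$ once; a minimal standalone sketch of that same system with plain NumPy, assuming an undirected graph so that $A = A^T$:

>>> import numpy as np
>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> A = nx.to_numpy_array(G)
>>> alpha, beta = 0.1, 1.0
>>> x = np.linalg.solve(np.eye(len(G)) - alpha * A.T, beta * np.ones(len(G)))
>>> x /= np.linalg.norm(x)  # same scaling as normalized=True for a positive-sum solution
>>> np.allclose(sorted(x), sorted(nx.katz_centrality_numpy(G, alpha, beta).values()))
True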
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/second_order.py
ADDED
@@ -0,0 +1,141 @@
"""Copyright (c) 2015 – Thomson Licensing, SAS

Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the
disclaimer below) provided that the following conditions are met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

* Neither the name of Thomson Licensing, or Technicolor, nor the names
of its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.

NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

import networkx as nx
from networkx.utils import not_implemented_for

# Authors: Erwan Le Merrer ([email protected])

__all__ = ["second_order_centrality"]


@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def second_order_centrality(G, weight="weight"):
    """Compute the second order centrality for nodes of G.

    The second order centrality of a given node is the standard deviation of
    the return times to that node of a perpetual random walk on G:

    Parameters
    ----------
    G : graph
      A NetworkX connected and undirected graph.

    weight : string or None, optional (default="weight")
      The name of an edge attribute that holds the numerical value
      used as a weight. If None then each edge has weight 1.

    Returns
    -------
    nodes : dictionary
       Dictionary keyed by node with second order centrality as the value.

    Examples
    --------
    >>> G = nx.star_graph(10)
    >>> soc = nx.second_order_centrality(G)
    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # pick first id
    0

    Raises
    ------
    NetworkXException
        If the graph G is empty, non connected or has negative weights.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Lower values of second order centrality indicate higher centrality.

    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.

    This code implements the analytical version of the algorithm, i.e.,
    there is no simulation of a random walk process involved. The random walk
    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
    centrality values are the standard deviations for random walk return times
    on the transformed input graph G (equal in-degree at each node by adding
    self-loops).

    Complexity of this implementation, made to run locally on a single machine,
    is O(n^3), with n the size of G, which makes it viable only for small
    graphs.

    References
    ----------
    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
       "Second order centrality: Distributed assessment of nodes criticity in
       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
    """
    import numpy as np

    n = len(G)

    if n == 0:
        raise nx.NetworkXException("Empty graph.")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if any(d.get(weight, 0) < 0 for u, v, d in G.edges(data=True)):
        raise nx.NetworkXException("Graph has negative edge weights.")

    # balancing G for Metropolis-Hastings random walks
    G = nx.DiGraph(G)
    in_deg = dict(G.in_degree(weight=weight))
    d_max = max(in_deg.values())
    for i, deg in in_deg.items():
        if deg < d_max:
            G.add_edge(i, i, weight=d_max - deg)

    P = nx.to_numpy_array(G)
    P /= P.sum(axis=1)[:, np.newaxis]  # to transition probability matrix

    def _Qj(P, j):
        P = P.copy()
        P[:, j] = 0
        return P

    M = np.empty([n, n])

    for i in range(n):
        M[:, i] = np.linalg.solve(
            np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0]
        )  # eq 3

    return dict(
        zip(
            G.nodes,
            (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)),
        )
    )  # eq 6
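As the Notes point out, lower second order centrality means a more central node; a short sanity check on a star graph, where the hub should have the smallest standard deviation of return times:

>>> import networkx as nx
>>> soc = nx.second_order_centrality(nx.star_graph(10))
>>> min(soc, key=soc.get)  # the hub
0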
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/subgraph_alg.py
ADDED
@@ -0,0 +1,339 @@
"""
Subgraph centrality and communicability betweenness.
"""
import networkx as nx
from networkx.utils import not_implemented_for

__all__ = [
    "subgraph_centrality_exp",
    "subgraph_centrality",
    "communicability_betweenness_centrality",
    "estrada_index",
]


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def subgraph_centrality_exp(G):
    r"""Returns the subgraph centrality for each node of G.

    Subgraph centrality of a node `n` is the sum of weighted closed
    walks of all lengths starting and ending at node `n`. The weights
    decrease with path length. Each closed walk is associated with a
    connected subgraph ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with subgraph centrality as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    subgraph_centrality :
        Alternative algorithm of the subgraph centrality for each node of G.

    Notes
    -----
    This version of the algorithm exponentiates the adjacency matrix.

    The subgraph centrality of a node `u` in G can be found using
    the matrix exponential of the adjacency matrix of G [1]_,

    .. math::

        SC(u) = (e^A)_{uu} .

    References
    ----------
    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
       "Subgraph centrality in complex networks",
       Physical Review E 71, 056103 (2005).
       https://arxiv.org/abs/cond-mat/0504730

    Examples
    --------
    (Example from [1]_)
    >>> G = nx.Graph(
    ...     [
    ...         (1, 2),
    ...         (1, 5),
    ...         (1, 8),
    ...         (2, 3),
    ...         (2, 8),
    ...         (3, 4),
    ...         (3, 6),
    ...         (4, 5),
    ...         (4, 7),
    ...         (5, 6),
    ...         (6, 7),
    ...         (7, 8),
    ...     ]
    ... )
    >>> sc = nx.subgraph_centrality_exp(G)
    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
    """
    # alternative implementation that calculates the matrix exponential
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    expA = sp.linalg.expm(A)
    # convert diagonal to dictionary keyed by node
    sc = dict(zip(nodelist, map(float, expA.diagonal())))
    return sc
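Because $SC(u)=(e^A)_{uu}$, the values returned above are simply the diagonal of the matrix exponential of the adjacency matrix; a brief cross-check, assuming SciPy is installed:

>>> import scipy as sp
>>> import networkx as nx
>>> G = nx.cycle_graph(5)
>>> diag = sp.linalg.expm(nx.to_numpy_array(G)).diagonal()
>>> sc = nx.subgraph_centrality_exp(G)
>>> all(abs(sc[node] - d) < 1e-12 for node, d in zip(G, diag))
True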


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def subgraph_centrality(G):
    r"""Returns subgraph centrality for each node in G.

    Subgraph centrality of a node `n` is the sum of weighted closed
    walks of all lengths starting and ending at node `n`. The weights
    decrease with path length. Each closed walk is associated with a
    connected subgraph ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with subgraph centrality as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    subgraph_centrality_exp :
        Alternative algorithm of the subgraph centrality for each node of G.

    Notes
    -----
    This version of the algorithm computes eigenvalues and eigenvectors
    of the adjacency matrix.

    Subgraph centrality of a node `u` in G can be found using
    a spectral decomposition of the adjacency matrix [1]_,

    .. math::

        SC(u) = \sum_{j=1}^{N} (v_{j}^{u})^2 e^{\lambda_{j}},

    where `v_j` is an eigenvector of the adjacency matrix `A` of G
    corresponding to the eigenvalue `\lambda_j`.

    Examples
    --------
    (Example from [1]_)
    >>> G = nx.Graph(
    ...     [
    ...         (1, 2),
    ...         (1, 5),
    ...         (1, 8),
    ...         (2, 3),
    ...         (2, 8),
    ...         (3, 4),
    ...         (3, 6),
    ...         (4, 5),
    ...         (4, 7),
    ...         (5, 6),
    ...         (6, 7),
    ...         (7, 8),
    ...     ]
    ... )
    >>> sc = nx.subgraph_centrality(G)
    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']

    References
    ----------
    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
       "Subgraph centrality in complex networks",
       Physical Review E 71, 056103 (2005).
       https://arxiv.org/abs/cond-mat/0504730

    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[np.nonzero(A)] = 1
    w, v = np.linalg.eigh(A)
    vsquare = np.array(v) ** 2
    expw = np.exp(w)
    xg = vsquare @ expw
    # convert vector to dictionary keyed by node
    sc = dict(zip(nodelist, map(float, xg)))
    return sc
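The spectral form $\sum_j (v_j^u)^2 e^{\lambda_j}$ and the matrix-exponential form give the same numbers up to floating point error; a short comparison of the two variants (the ``_exp`` variant assumes SciPy is available):

>>> import networkx as nx
>>> G = nx.krackhardt_kite_graph()
>>> sc_eig = nx.subgraph_centrality(G)  # eigendecomposition version
>>> sc_exp = nx.subgraph_centrality_exp(G)  # matrix exponential version
>>> all(abs(sc_eig[n] - sc_exp[n]) < 1e-9 for n in G)
True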


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_betweenness_centrality(G):
    r"""Returns subgraph communicability for all pairs of nodes in G.

    Communicability betweenness measure makes use of the number of walks
    connecting every pair of nodes as the basis of a betweenness centrality
    measure.

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with communicability betweenness as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges,
    and `A` denote the adjacency matrix of `G`.

    Let `G(r)=(V,E(r))` be the graph resulting from
    removing all edges connected to node `r` but not the node itself.

    The adjacency matrix for `G(r)` is `A+E(r)`, where `E(r)` has nonzeros
    only in row and column `r`.

    The subgraph betweenness of a node `r` is [1]_

    .. math::

        \omega_{r} = \frac{1}{C} \sum_{p} \sum_{q} \frac{G_{prq}}{G_{pq}},
        \quad p \neq q, \; q \neq r,

    where
    `G_{prq} = (e^{A})_{pq} - (e^{A+E(r)})_{pq}` is the number of walks
    involving node r,
    `G_{pq} = (e^{A})_{pq}` is the number of closed walks starting
    at node `p` and ending at node `q`,
    and `C = (n-1)^{2} - (n-1)` is a normalization factor equal to the
    number of terms in the sum.

    The resulting `\omega_{r}` takes values between zero and one.
    The lower bound cannot be attained for a connected
    graph, and the upper bound is attained in the star graph.

    References
    ----------
    .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
       "Communicability Betweenness in Complex Networks"
       Physica A 388 (2009) 764-774.
       https://arxiv.org/abs/0905.4102

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> cbc = nx.communicability_betweenness_centrality(G)
    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
    """
    import numpy as np
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    n = len(nodelist)
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[np.nonzero(A)] = 1
    expA = sp.linalg.expm(A)
    mapping = dict(zip(nodelist, range(n)))
    cbc = {}
    for v in G:
        # remove row and col of node v
        i = mapping[v]
        row = A[i, :].copy()
        col = A[:, i].copy()
        A[i, :] = 0
        A[:, i] = 0
        B = (expA - sp.linalg.expm(A)) / expA
        # sum with row/col of node v and diag set to zero
        B[i, :] = 0
        B[:, i] = 0
        B -= np.diag(np.diag(B))
        cbc[v] = float(B.sum())
        # put row and col back
        A[i, :] = row
        A[:, i] = col
    # rescale when more than two nodes
    order = len(cbc)
    if order > 2:
        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
        cbc = {node: value * scale for node, value in cbc.items()}
    return cbc
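The Notes state that $\omega_r$ lies between zero and one, with the upper bound attained in the star graph; a small illustration on a star, where the hub mediates essentially all walks between the leaves:

>>> import networkx as nx
>>> cbc = nx.communicability_betweenness_centrality(nx.star_graph(5))
>>> round(cbc[0], 2)  # hub value sits at the upper bound
1.0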


@nx._dispatchable
def estrada_index(G):
    r"""Returns the Estrada index of the graph G.

    The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    estrada index : float

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and let
    `\lambda_{1}\leq\lambda_{2}\leq\cdots\leq\lambda_{n}`
    be a non-decreasing ordering of the eigenvalues of its adjacency
    matrix `A`. The Estrada index is ([1]_, [2]_)

    .. math::

        EE(G) = \sum_{j=1}^n e^{\lambda_j}.

    References
    ----------
    .. [1] E. Estrada, "Characterization of 3D molecular structure",
       Chem. Phys. Lett. 319, 713 (2000).
       https://doi.org/10.1016/S0009-2614(00)00158-5
    .. [2] José Antonio de la Peña, Ivan Gutman, Juan Rada,
       "Estimating the Estrada index",
       Linear Algebra and its Applications. 427, 1 (2007).
       https://doi.org/10.1016/j.laa.2007.06.020

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> ei = nx.estrada_index(G)
    >>> print(f"{ei:0.5}")
    20.55
    """
    return sum(subgraph_centrality(G).values())
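Equivalently, $EE(G)$ is the trace of the matrix exponential of the adjacency matrix; a quick numerical check against the example value above, assuming SciPy is installed:

>>> import scipy as sp
>>> import networkx as nx
>>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
>>> ee = sp.linalg.expm(nx.to_numpy_array(G)).trace()
>>> print(f"{float(ee):0.5}")
20.55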
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/__init__.py
ADDED
File without changes
|
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
ADDED
@@ -0,0 +1,197 @@
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
from networkx import approximate_current_flow_betweenness_centrality as approximate_cfbc
|
5 |
+
from networkx import edge_current_flow_betweenness_centrality as edge_current_flow
|
6 |
+
|
7 |
+
np = pytest.importorskip("numpy")
|
8 |
+
pytest.importorskip("scipy")
|
9 |
+
|
10 |
+
|
11 |
+
class TestFlowBetweennessCentrality:
|
12 |
+
def test_K4_normalized(self):
|
13 |
+
"""Betweenness centrality: K4"""
|
14 |
+
G = nx.complete_graph(4)
|
15 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
16 |
+
b_answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
17 |
+
for n in sorted(G):
|
18 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
19 |
+
G.add_edge(0, 1, weight=0.5, other=0.3)
|
20 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight=None)
|
21 |
+
for n in sorted(G):
|
22 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
23 |
+
wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555}
|
24 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight")
|
25 |
+
for n in sorted(G):
|
26 |
+
assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
|
27 |
+
wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358}
|
28 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other")
|
29 |
+
for n in sorted(G):
|
30 |
+
assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
|
31 |
+
|
32 |
+
def test_K4(self):
|
33 |
+
"""Betweenness centrality: K4"""
|
34 |
+
G = nx.complete_graph(4)
|
35 |
+
for solver in ["full", "lu", "cg"]:
|
36 |
+
b = nx.current_flow_betweenness_centrality(
|
37 |
+
G, normalized=False, solver=solver
|
38 |
+
)
|
39 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
40 |
+
for n in sorted(G):
|
41 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
42 |
+
|
43 |
+
def test_P4_normalized(self):
|
44 |
+
"""Betweenness centrality: P4 normalized"""
|
45 |
+
G = nx.path_graph(4)
|
46 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
47 |
+
b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0}
|
48 |
+
for n in sorted(G):
|
49 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
50 |
+
|
51 |
+
def test_P4(self):
|
52 |
+
"""Betweenness centrality: P4"""
|
53 |
+
G = nx.path_graph(4)
|
54 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=False)
|
55 |
+
b_answer = {0: 0, 1: 2, 2: 2, 3: 0}
|
56 |
+
for n in sorted(G):
|
57 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
58 |
+
|
59 |
+
def test_star(self):
|
60 |
+
"""Betweenness centrality: star"""
|
61 |
+
G = nx.Graph()
|
62 |
+
nx.add_star(G, ["a", "b", "c", "d"])
|
63 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
64 |
+
b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0}
|
65 |
+
for n in sorted(G):
|
66 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
67 |
+
|
68 |
+
def test_solvers2(self):
|
69 |
+
"""Betweenness centrality: alternate solvers"""
|
70 |
+
G = nx.complete_graph(4)
|
71 |
+
for solver in ["full", "lu", "cg"]:
|
72 |
+
b = nx.current_flow_betweenness_centrality(
|
73 |
+
G, normalized=False, solver=solver
|
74 |
+
)
|
75 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
76 |
+
for n in sorted(G):
|
77 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
78 |
+
|
79 |
+
|
80 |
+
class TestApproximateFlowBetweennessCentrality:
|
81 |
+
def test_K4_normalized(self):
|
82 |
+
"Approximate current-flow betweenness centrality: K4 normalized"
|
83 |
+
G = nx.complete_graph(4)
|
84 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
85 |
+
epsilon = 0.1
|
86 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
87 |
+
for n in sorted(G):
|
88 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
89 |
+
|
90 |
+
def test_K4(self):
|
91 |
+
"Approximate current-flow betweenness centrality: K4"
|
92 |
+
G = nx.complete_graph(4)
|
93 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=False)
|
94 |
+
epsilon = 0.1
|
95 |
+
ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon)
|
96 |
+
for n in sorted(G):
|
97 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2)
|
98 |
+
|
99 |
+
def test_star(self):
|
100 |
+
"Approximate current-flow betweenness centrality: star"
|
101 |
+
G = nx.Graph()
|
102 |
+
nx.add_star(G, ["a", "b", "c", "d"])
|
103 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
104 |
+
epsilon = 0.1
|
105 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
106 |
+
for n in sorted(G):
|
107 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
108 |
+
|
109 |
+
def test_grid(self):
|
110 |
+
"Approximate current-flow betweenness centrality: 2d grid"
|
111 |
+
G = nx.grid_2d_graph(4, 4)
|
112 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
113 |
+
epsilon = 0.1
|
114 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
115 |
+
for n in sorted(G):
|
116 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
117 |
+
|
118 |
+
def test_seed(self):
|
119 |
+
G = nx.complete_graph(4)
|
120 |
+
b = approximate_cfbc(G, normalized=False, epsilon=0.05, seed=1)
|
121 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
122 |
+
for n in sorted(G):
|
123 |
+
np.testing.assert_allclose(b[n], b_answer[n], atol=0.1)
|
124 |
+
|
125 |
+
def test_solvers(self):
|
126 |
+
"Approximate current-flow betweenness centrality: solvers"
|
127 |
+
G = nx.complete_graph(4)
|
128 |
+
epsilon = 0.1
|
129 |
+
for solver in ["full", "lu", "cg"]:
|
130 |
+
b = approximate_cfbc(
|
131 |
+
G, normalized=False, solver=solver, epsilon=0.5 * epsilon
|
132 |
+
)
|
133 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
134 |
+
for n in sorted(G):
|
135 |
+
np.testing.assert_allclose(b[n], b_answer[n], atol=epsilon)
|
136 |
+
|
137 |
+
def test_lower_kmax(self):
|
138 |
+
G = nx.complete_graph(4)
|
139 |
+
with pytest.raises(nx.NetworkXError, match="Increase kmax or epsilon"):
|
140 |
+
nx.approximate_current_flow_betweenness_centrality(G, kmax=4)
|
141 |
+
|
142 |
+
|
143 |
+
class TestWeightedFlowBetweennessCentrality:
|
144 |
+
pass
|
145 |
+
|
146 |
+
|
147 |
+
class TestEdgeFlowBetweennessCentrality:
|
148 |
+
def test_K4(self):
|
149 |
+
"""Edge flow betweenness centrality: K4"""
|
150 |
+
G = nx.complete_graph(4)
|
151 |
+
b = edge_current_flow(G, normalized=True)
|
152 |
+
b_answer = dict.fromkeys(G.edges(), 0.25)
|
153 |
+
for (s, t), v1 in b_answer.items():
|
154 |
+
v2 = b.get((s, t), b.get((t, s)))
|
155 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
156 |
+
|
157 |
+
def test_K4_normalized(self):
|
158 |
+
"""Edge flow betweenness centrality: K4"""
|
159 |
+
G = nx.complete_graph(4)
|
160 |
+
b = edge_current_flow(G, normalized=False)
|
161 |
+
b_answer = dict.fromkeys(G.edges(), 0.75)
|
162 |
+
for (s, t), v1 in b_answer.items():
|
163 |
+
v2 = b.get((s, t), b.get((t, s)))
|
164 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
165 |
+
|
166 |
+
def test_C4(self):
|
167 |
+
"""Edge flow betweenness centrality: C4"""
|
168 |
+
G = nx.cycle_graph(4)
|
169 |
+
b = edge_current_flow(G, normalized=False)
|
170 |
+
b_answer = {(0, 1): 1.25, (0, 3): 1.25, (1, 2): 1.25, (2, 3): 1.25}
|
171 |
+
for (s, t), v1 in b_answer.items():
|
172 |
+
v2 = b.get((s, t), b.get((t, s)))
|
173 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
174 |
+
|
175 |
+
def test_P4(self):
|
176 |
+
"""Edge betweenness centrality: P4"""
|
177 |
+
G = nx.path_graph(4)
|
178 |
+
b = edge_current_flow(G, normalized=False)
|
179 |
+
b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5}
|
180 |
+
for (s, t), v1 in b_answer.items():
|
181 |
+
v2 = b.get((s, t), b.get((t, s)))
|
182 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
183 |
+
|
184 |
+
|
185 |
+
@pytest.mark.parametrize(
|
186 |
+
"centrality_func",
|
187 |
+
(
|
188 |
+
nx.current_flow_betweenness_centrality,
|
189 |
+
nx.edge_current_flow_betweenness_centrality,
|
190 |
+
nx.approximate_current_flow_betweenness_centrality,
|
191 |
+
),
|
192 |
+
)
|
193 |
+
def test_unconnected_graphs_betweenness_centrality(centrality_func):
|
194 |
+
G = nx.Graph([(1, 2), (3, 4)])
|
195 |
+
G.add_node(5)
|
196 |
+
with pytest.raises(nx.NetworkXError, match="Graph not connected"):
|
197 |
+
centrality_func(G)
|
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py
ADDED
@@ -0,0 +1,73 @@
1 |
+
import networkx as nx
|
2 |
+
|
3 |
+
|
4 |
+
def small_ego_G():
|
5 |
+
"""The sample network from https://arxiv.org/pdf/1310.6753v1.pdf"""
|
6 |
+
edges = [
|
7 |
+
("a", "b"),
|
8 |
+
("a", "c"),
|
9 |
+
("b", "c"),
|
10 |
+
("b", "d"),
|
11 |
+
("b", "e"),
|
12 |
+
("b", "f"),
|
13 |
+
("c", "d"),
|
14 |
+
("c", "f"),
|
15 |
+
("c", "h"),
|
16 |
+
("d", "f"),
|
17 |
+
("e", "f"),
|
18 |
+
("f", "h"),
|
19 |
+
("h", "j"),
|
20 |
+
("h", "k"),
|
21 |
+
("i", "j"),
|
22 |
+
("i", "k"),
|
23 |
+
("j", "k"),
|
24 |
+
("u", "a"),
|
25 |
+
("u", "b"),
|
26 |
+
("u", "c"),
|
27 |
+
("u", "d"),
|
28 |
+
("u", "e"),
|
29 |
+
("u", "f"),
|
30 |
+
("u", "g"),
|
31 |
+
("u", "h"),
|
32 |
+
("u", "i"),
|
33 |
+
("u", "j"),
|
34 |
+
("u", "k"),
|
35 |
+
]
|
36 |
+
G = nx.Graph()
|
37 |
+
G.add_edges_from(edges)
|
38 |
+
|
39 |
+
return G
|
40 |
+
|
41 |
+
|
42 |
+
class TestDispersion:
|
43 |
+
def test_article(self):
|
44 |
+
"""our algorithm matches article's"""
|
45 |
+
G = small_ego_G()
|
46 |
+
disp_uh = nx.dispersion(G, "u", "h", normalized=False)
|
47 |
+
disp_ub = nx.dispersion(G, "u", "b", normalized=False)
|
48 |
+
assert disp_uh == 4
|
49 |
+
assert disp_ub == 1
|
50 |
+
|
51 |
+
def test_results_length(self):
|
52 |
+
"""there is a result for every node"""
|
53 |
+
G = small_ego_G()
|
54 |
+
disp = nx.dispersion(G)
|
55 |
+
disp_Gu = nx.dispersion(G, "u")
|
56 |
+
disp_uv = nx.dispersion(G, "u", "h")
|
57 |
+
assert len(disp) == len(G)
|
58 |
+
assert len(disp_Gu) == len(G) - 1
|
59 |
+
assert isinstance(disp_uv, float)
|
60 |
+
|
61 |
+
def test_dispersion_v_only(self):
|
62 |
+
G = small_ego_G()
|
63 |
+
disp_G_h = nx.dispersion(G, v="h", normalized=False)
|
64 |
+
disp_G_h_normalized = nx.dispersion(G, v="h", normalized=True)
|
65 |
+
assert disp_G_h == {"c": 0, "f": 0, "j": 0, "k": 0, "u": 4}
|
66 |
+
assert disp_G_h_normalized == {"c": 0.0, "f": 0.0, "j": 0.0, "k": 0.0, "u": 1.0}
|
67 |
+
|
68 |
+
def test_impossible_things(self):
|
69 |
+
G = nx.karate_club_graph()
|
70 |
+
disp = nx.dispersion(G)
|
71 |
+
for u in disp:
|
72 |
+
for v in disp[u]:
|
73 |
+
assert disp[u][v] >= 0
|
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_group.py
ADDED
@@ -0,0 +1,278 @@
1 |
+
"""
|
2 |
+
Tests for Group Centrality Measures
|
3 |
+
"""
|
4 |
+
|
5 |
+
|
6 |
+
import pytest
|
7 |
+
|
8 |
+
import networkx as nx
|
9 |
+
|
10 |
+
|
11 |
+
class TestGroupBetweennessCentrality:
|
12 |
+
def test_group_betweenness_single_node(self):
|
13 |
+
"""
|
14 |
+
Group betweenness centrality for single node group
|
15 |
+
"""
|
16 |
+
G = nx.path_graph(5)
|
17 |
+
C = [1]
|
18 |
+
b = nx.group_betweenness_centrality(
|
19 |
+
G, C, weight=None, normalized=False, endpoints=False
|
20 |
+
)
|
21 |
+
b_answer = 3.0
|
22 |
+
assert b == b_answer
|
23 |
+
|
24 |
+
def test_group_betweenness_with_endpoints(self):
|
25 |
+
"""
|
26 |
+
Group betweenness centrality for single node group
|
27 |
+
"""
|
28 |
+
G = nx.path_graph(5)
|
29 |
+
C = [1]
|
30 |
+
b = nx.group_betweenness_centrality(
|
31 |
+
G, C, weight=None, normalized=False, endpoints=True
|
32 |
+
)
|
33 |
+
b_answer = 7.0
|
34 |
+
assert b == b_answer
|
35 |
+
|
36 |
+
def test_group_betweenness_normalized(self):
|
37 |
+
"""
|
38 |
+
Group betweenness centrality for group with more than
|
39 |
+
1 node and normalized
|
40 |
+
"""
|
41 |
+
G = nx.path_graph(5)
|
42 |
+
C = [1, 3]
|
43 |
+
b = nx.group_betweenness_centrality(
|
44 |
+
G, C, weight=None, normalized=True, endpoints=False
|
45 |
+
)
|
46 |
+
b_answer = 1.0
|
47 |
+
assert b == b_answer
|
48 |
+
|
49 |
+
def test_two_group_betweenness_value_zero(self):
|
50 |
+
"""
|
51 |
+
Group betweenness centrality value of 0
|
52 |
+
"""
|
53 |
+
G = nx.cycle_graph(7)
|
54 |
+
C = [[0, 1, 6], [0, 1, 5]]
|
55 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
56 |
+
b_answer = [0.0, 3.0]
|
57 |
+
assert b == b_answer
|
58 |
+
|
59 |
+
def test_group_betweenness_value_zero(self):
|
60 |
+
"""
|
61 |
+
Group betweenness centrality value of 0
|
62 |
+
"""
|
63 |
+
G = nx.cycle_graph(6)
|
64 |
+
C = [0, 1, 5]
|
65 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
66 |
+
b_answer = 0.0
|
67 |
+
assert b == b_answer
|
68 |
+
|
69 |
+
def test_group_betweenness_disconnected_graph(self):
|
70 |
+
"""
|
71 |
+
Group betweenness centrality in a disconnected graph
|
72 |
+
"""
|
73 |
+
G = nx.path_graph(5)
|
74 |
+
G.remove_edge(0, 1)
|
75 |
+
C = [1]
|
76 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
77 |
+
b_answer = 0.0
|
78 |
+
assert b == b_answer
|
79 |
+
|
80 |
+
def test_group_betweenness_node_not_in_graph(self):
|
81 |
+
"""
|
82 |
+
Node(s) in C not in graph, raises NodeNotFound exception
|
83 |
+
"""
|
84 |
+
with pytest.raises(nx.NodeNotFound):
|
85 |
+
nx.group_betweenness_centrality(nx.path_graph(5), [4, 7, 8])
|
86 |
+
|
87 |
+
def test_group_betweenness_directed_weighted(self):
|
88 |
+
"""
|
89 |
+
Group betweenness centrality in a directed and weighted graph
|
90 |
+
"""
|
91 |
+
G = nx.DiGraph()
|
92 |
+
G.add_edge(1, 0, weight=1)
|
93 |
+
G.add_edge(0, 2, weight=2)
|
94 |
+
G.add_edge(1, 2, weight=3)
|
95 |
+
G.add_edge(3, 1, weight=4)
|
96 |
+
G.add_edge(2, 3, weight=1)
|
97 |
+
G.add_edge(4, 3, weight=6)
|
98 |
+
G.add_edge(2, 4, weight=7)
|
99 |
+
C = [1, 2]
|
100 |
+
b = nx.group_betweenness_centrality(G, C, weight="weight", normalized=False)
|
101 |
+
b_answer = 5.0
|
102 |
+
assert b == b_answer
|
103 |
+
|
104 |
+
|
105 |
+
class TestProminentGroup:
|
106 |
+
np = pytest.importorskip("numpy")
|
107 |
+
pd = pytest.importorskip("pandas")
|
108 |
+
|
109 |
+
def test_prominent_group_single_node(self):
|
110 |
+
"""
|
111 |
+
Prominent group for single node
|
112 |
+
"""
|
113 |
+
G = nx.path_graph(5)
|
114 |
+
k = 1
|
115 |
+
b, g = nx.prominent_group(G, k, normalized=False, endpoints=False)
|
116 |
+
b_answer, g_answer = 4.0, [2]
|
117 |
+
assert b == b_answer and g == g_answer
|
118 |
+
|
119 |
+
def test_prominent_group_with_c(self):
|
120 |
+
"""
|
121 |
+
Prominent group without some nodes
|
122 |
+
"""
|
123 |
+
G = nx.path_graph(5)
|
124 |
+
k = 1
|
125 |
+
b, g = nx.prominent_group(G, k, normalized=False, C=[2])
|
126 |
+
b_answer, g_answer = 3.0, [1]
|
127 |
+
assert b == b_answer and g == g_answer
|
128 |
+
|
129 |
+
def test_prominent_group_normalized_endpoints(self):
|
130 |
+
"""
|
131 |
+
Prominent group with normalized result, with endpoints
|
132 |
+
"""
|
133 |
+
G = nx.cycle_graph(7)
|
134 |
+
k = 2
|
135 |
+
b, g = nx.prominent_group(G, k, normalized=True, endpoints=True)
|
136 |
+
b_answer, g_answer = 1.7, [2, 5]
|
137 |
+
assert b == b_answer and g == g_answer
|
138 |
+
|
139 |
+
def test_prominent_group_disconnected_graph(self):
|
140 |
+
"""
|
141 |
+
Prominent group of disconnected graph
|
142 |
+
"""
|
143 |
+
G = nx.path_graph(6)
|
144 |
+
G.remove_edge(0, 1)
|
145 |
+
k = 1
|
146 |
+
b, g = nx.prominent_group(G, k, weight=None, normalized=False)
|
147 |
+
b_answer, g_answer = 4.0, [3]
|
148 |
+
assert b == b_answer and g == g_answer
|
149 |
+
|
150 |
+
def test_prominent_group_node_not_in_graph(self):
|
151 |
+
"""
|
152 |
+
Node(s) in C not in graph, raises NodeNotFound exception
|
153 |
+
"""
|
154 |
+
with pytest.raises(nx.NodeNotFound):
|
155 |
+
nx.prominent_group(nx.path_graph(5), 1, C=[10])
|
156 |
+
|
157 |
+
def test_group_betweenness_directed_weighted(self):
|
158 |
+
"""
|
159 |
+
Group betweenness centrality in a directed and weighted graph
|
160 |
+
"""
|
161 |
+
G = nx.DiGraph()
|
162 |
+
G.add_edge(1, 0, weight=1)
|
163 |
+
G.add_edge(0, 2, weight=2)
|
164 |
+
G.add_edge(1, 2, weight=3)
|
165 |
+
G.add_edge(3, 1, weight=4)
|
166 |
+
G.add_edge(2, 3, weight=1)
|
167 |
+
G.add_edge(4, 3, weight=6)
|
168 |
+
G.add_edge(2, 4, weight=7)
|
169 |
+
k = 2
|
170 |
+
b, g = nx.prominent_group(G, k, weight="weight", normalized=False)
|
171 |
+
b_answer, g_answer = 5.0, [1, 2]
|
172 |
+
assert b == b_answer and g == g_answer
|
173 |
+
|
174 |
+
def test_prominent_group_greedy_algorithm(self):
|
175 |
+
"""
|
176 |
+
Group betweenness centrality in a greedy algorithm
|
177 |
+
"""
|
178 |
+
G = nx.cycle_graph(7)
|
179 |
+
k = 2
|
180 |
+
b, g = nx.prominent_group(G, k, normalized=True, endpoints=True, greedy=True)
|
181 |
+
b_answer, g_answer = 1.7, [6, 3]
|
182 |
+
assert b == b_answer and g == g_answer
|
183 |
+
|
184 |
+
|
185 |
+
class TestGroupClosenessCentrality:
|
186 |
+
def test_group_closeness_single_node(self):
|
187 |
+
"""
|
188 |
+
Group closeness centrality for a single node group
|
189 |
+
"""
|
190 |
+
G = nx.path_graph(5)
|
191 |
+
c = nx.group_closeness_centrality(G, [1])
|
192 |
+
c_answer = nx.closeness_centrality(G, 1)
|
193 |
+
assert c == c_answer
|
194 |
+
|
195 |
+
def test_group_closeness_disconnected(self):
|
196 |
+
"""
|
197 |
+
Group closeness centrality for a disconnected graph
|
198 |
+
"""
|
199 |
+
G = nx.Graph()
|
200 |
+
G.add_nodes_from([1, 2, 3, 4])
|
201 |
+
c = nx.group_closeness_centrality(G, [1, 2])
|
202 |
+
c_answer = 0
|
203 |
+
assert c == c_answer
|
204 |
+
|
205 |
+
def test_group_closeness_multiple_node(self):
|
206 |
+
"""
|
207 |
+
Group closeness centrality for a group with more than
|
208 |
+
1 node
|
209 |
+
"""
|
210 |
+
G = nx.path_graph(4)
|
211 |
+
c = nx.group_closeness_centrality(G, [1, 2])
|
212 |
+
c_answer = 1
|
213 |
+
assert c == c_answer
|
214 |
+
|
215 |
+
def test_group_closeness_node_not_in_graph(self):
|
216 |
+
"""
|
217 |
+
Node(s) in S not in graph, raises NodeNotFound exception
|
218 |
+
"""
|
219 |
+
with pytest.raises(nx.NodeNotFound):
|
220 |
+
nx.group_closeness_centrality(nx.path_graph(5), [6, 7, 8])
|
221 |
+
|
222 |
+
|
223 |
+
class TestGroupDegreeCentrality:
|
224 |
+
def test_group_degree_centrality_single_node(self):
|
225 |
+
"""
|
226 |
+
Group degree centrality for a single node group
|
227 |
+
"""
|
228 |
+
G = nx.path_graph(4)
|
229 |
+
d = nx.group_degree_centrality(G, [1])
|
230 |
+
d_answer = nx.degree_centrality(G)[1]
|
231 |
+
assert d == d_answer
|
232 |
+
|
233 |
+
def test_group_degree_centrality_multiple_node(self):
|
234 |
+
"""
|
235 |
+
Group degree centrality for group with more than
|
236 |
+
1 node
|
237 |
+
"""
|
238 |
+
G = nx.Graph()
|
239 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
240 |
+
G.add_edges_from(
|
241 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
242 |
+
)
|
243 |
+
d = nx.group_degree_centrality(G, [1, 2])
|
244 |
+
d_answer = 1
|
245 |
+
assert d == d_answer
|
246 |
+
|
247 |
+
def test_group_in_degree_centrality(self):
|
248 |
+
"""
|
249 |
+
Group in-degree centrality in a DiGraph
|
250 |
+
"""
|
251 |
+
G = nx.DiGraph()
|
252 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
253 |
+
G.add_edges_from(
|
254 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
255 |
+
)
|
256 |
+
d = nx.group_in_degree_centrality(G, [1, 2])
|
257 |
+
d_answer = 0
|
258 |
+
assert d == d_answer
|
259 |
+
|
260 |
+
def test_group_out_degree_centrality(self):
|
261 |
+
"""
|
262 |
+
Group out-degree centrality in a DiGraph
|
263 |
+
"""
|
264 |
+
G = nx.DiGraph()
|
265 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
266 |
+
G.add_edges_from(
|
267 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
268 |
+
)
|
269 |
+
d = nx.group_out_degree_centrality(G, [1, 2])
|
270 |
+
d_answer = 1
|
271 |
+
assert d == d_answer
|
272 |
+
|
273 |
+
def test_group_degree_centrality_node_not_in_graph(self):
|
274 |
+
"""
|
275 |
+
Node(s) in S not in graph, raises NetworkXError
|
276 |
+
"""
|
277 |
+
with pytest.raises(nx.NetworkXError):
|
278 |
+
nx.group_degree_centrality(nx.path_graph(5), [6, 7, 8])
|
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
ADDED
@@ -0,0 +1,221 @@
1 |
+
import pytest
|
2 |
+
|
3 |
+
import networkx as nx
|
4 |
+
|
5 |
+
np = pytest.importorskip("numpy")
|
6 |
+
sp = pytest.importorskip("scipy")
|
7 |
+
|
8 |
+
|
9 |
+
def test_laplacian_centrality_null_graph():
|
10 |
+
G = nx.Graph()
|
11 |
+
with pytest.raises(nx.NetworkXPointlessConcept):
|
12 |
+
d = nx.laplacian_centrality(G, normalized=False)
|
13 |
+
|
14 |
+
|
15 |
+
def test_laplacian_centrality_single_node():
|
16 |
+
"""See gh-6571"""
|
17 |
+
G = nx.empty_graph(1)
|
18 |
+
assert nx.laplacian_centrality(G, normalized=False) == {0: 0}
|
19 |
+
with pytest.raises(ZeroDivisionError):
|
20 |
+
nx.laplacian_centrality(G, normalized=True)
|
21 |
+
|
22 |
+
|
23 |
+
def test_laplacian_centrality_unconnected_nodes():
|
24 |
+
"""laplacian_centrality on a unconnected node graph should return 0
|
25 |
+
|
26 |
+
For graphs without edges, the Laplacian energy is 0 and is unchanged with
|
27 |
+
node removal, so::
|
28 |
+
|
29 |
+
LC(v) = LE(G) - LE(G - v) = 0 - 0 = 0
|
30 |
+
"""
|
31 |
+
G = nx.empty_graph(3)
|
32 |
+
assert nx.laplacian_centrality(G, normalized=False) == {0: 0, 1: 0, 2: 0}
|
33 |
+
|
34 |
+
|
35 |
+
def test_laplacian_centrality_empty_graph():
|
36 |
+
G = nx.empty_graph(3)
|
37 |
+
with pytest.raises(ZeroDivisionError):
|
38 |
+
d = nx.laplacian_centrality(G, normalized=True)
|
39 |
+
|
40 |
+
|
41 |
+
def test_laplacian_centrality_E():
|
42 |
+
E = nx.Graph()
|
43 |
+
E.add_weighted_edges_from(
|
44 |
+
[(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
|
45 |
+
)
|
46 |
+
d = nx.laplacian_centrality(E)
|
47 |
+
exact = {
|
48 |
+
0: 0.700000,
|
49 |
+
1: 0.900000,
|
50 |
+
2: 0.280000,
|
51 |
+
3: 0.220000,
|
52 |
+
4: 0.260000,
|
53 |
+
5: 0.040000,
|
54 |
+
}
|
55 |
+
|
56 |
+
for n, dc in d.items():
|
57 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
58 |
+
|
59 |
+
# Check not normalized
|
60 |
+
full_energy = 200
|
61 |
+
dnn = nx.laplacian_centrality(E, normalized=False)
|
62 |
+
for n, dc in dnn.items():
|
63 |
+
assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)
|
64 |
+
|
65 |
+
# Check unweighted not-normalized version
|
66 |
+
duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
|
67 |
+
print(duw_nn)
|
68 |
+
exact_uw_nn = {
|
69 |
+
0: 18,
|
70 |
+
1: 34,
|
71 |
+
2: 18,
|
72 |
+
3: 10,
|
73 |
+
4: 16,
|
74 |
+
5: 6,
|
75 |
+
}
|
76 |
+
for n, dc in duw_nn.items():
|
77 |
+
assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)
|
78 |
+
|
79 |
+
# Check unweighted version
|
80 |
+
duw = nx.laplacian_centrality(E, weight=None)
|
81 |
+
full_energy = 42
|
82 |
+
for n, dc in duw.items():
|
83 |
+
assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
|
84 |
+
|
85 |
+
|
86 |
+
def test_laplacian_centrality_KC():
|
+    KC = nx.karate_club_graph()
+    d = nx.laplacian_centrality(KC)
+    exact = {
+        0: 0.2543593,
+        1: 0.1724524,
+        2: 0.2166053,
+        3: 0.0964646,
+        4: 0.0350344,
+        5: 0.0571109,
+        6: 0.0540713,
+        7: 0.0788674,
+        8: 0.1222204,
+        9: 0.0217565,
+        10: 0.0308751,
+        11: 0.0215965,
+        12: 0.0174372,
+        13: 0.118861,
+        14: 0.0366341,
+        15: 0.0548712,
+        16: 0.0172772,
+        17: 0.0191969,
+        18: 0.0225564,
+        19: 0.0331147,
+        20: 0.0279955,
+        21: 0.0246361,
+        22: 0.0382339,
+        23: 0.1294193,
+        24: 0.0227164,
+        25: 0.0644697,
+        26: 0.0281555,
+        27: 0.075188,
+        28: 0.0364742,
+        29: 0.0707087,
+        30: 0.0708687,
+        31: 0.131019,
+        32: 0.2370821,
+        33: 0.3066709,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 12502
+    dnn = nx.laplacian_centrality(KC, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_K():
+    K = nx.krackhardt_kite_graph()
+    d = nx.laplacian_centrality(K)
+    exact = {
+        0: 0.3010753,
+        1: 0.3010753,
+        2: 0.2258065,
+        3: 0.483871,
+        4: 0.2258065,
+        5: 0.3870968,
+        6: 0.3870968,
+        7: 0.1935484,
+        8: 0.0752688,
+        9: 0.0322581,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 186
+    dnn = nx.laplacian_centrality(K, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_P3():
+    P3 = nx.path_graph(3)
+    d = nx.laplacian_centrality(P3)
+    exact = {0: 0.6, 1: 1.0, 2: 0.6}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_K5():
+    K5 = nx.complete_graph(5)
+    d = nx.laplacian_centrality(K5)
+    exact = {0: 0.52, 1: 0.52, 2: 0.52, 3: 0.52, 4: 0.52}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_FF():
+    FF = nx.florentine_families_graph()
+    d = nx.laplacian_centrality(FF)
+    exact = {
+        "Acciaiuoli": 0.0804598,
+        "Medici": 0.4022989,
+        "Castellani": 0.1724138,
+        "Peruzzi": 0.183908,
+        "Strozzi": 0.2528736,
+        "Barbadori": 0.137931,
+        "Ridolfi": 0.2183908,
+        "Tornabuoni": 0.2183908,
+        "Albizzi": 0.1954023,
+        "Salviati": 0.1149425,
+        "Pazzi": 0.0344828,
+        "Bischeri": 0.1954023,
+        "Guadagni": 0.2298851,
+        "Ginori": 0.045977,
+        "Lamberteschi": 0.0574713,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_DG():
+    DG = nx.DiGraph([(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)])
+    d = nx.laplacian_centrality(DG)
+    exact = {
+        0: 0.2123352,
+        5: 0.515391,
+        1: 0.2123352,
+        2: 0.2123352,
+        3: 0.2123352,
+        4: 0.2123352,
+        6: 0.2952031,
+        7: 0.2952031,
+        8: 0.2952031,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 9.50704
+    dnn = nx.laplacian_centrality(DG, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-4)
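The tests above pin exact values for nx.laplacian_centrality on several standard graphs. As a quick orientation, a minimal usage sketch (assuming a networkx release, 2.8 or later, that ships this function, with numpy installed) would look like:

import networkx as nx

# Laplacian centrality: the fractional drop in Laplacian energy (the sum of
# squared Laplacian eigenvalues) when a node is removed; a larger drop means
# a more central node.
G = nx.krackhardt_kite_graph()
scores = nx.laplacian_centrality(G)                 # normalized; matches the kite values above
raw = nx.laplacian_centrality(G, normalized=False)  # absolute energy drop per node
assert max(scores, key=scores.get) == 3             # node 3 is the most central node of the kite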
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_reaching.py
ADDED
@@ -0,0 +1,117 @@
+"""Unit tests for the :mod:`networkx.algorithms.centrality.reaching` module."""
+import pytest
+
+import networkx as nx
+
+
+class TestGlobalReachingCentrality:
+    """Unit tests for the global reaching centrality function."""
+
+    def test_non_positive_weights(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            nx.global_reaching_centrality(G, weight="weight")
+
+    def test_negatively_weighted(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
+            nx.global_reaching_centrality(G, weight="weight")
+
+    def test_directed_star(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 2, 0.5), (1, 3, 0.5)])
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight="weight") == 0.5
+        assert grc(G) == 1
+
+    def test_undirected_unweighted_star(self):
+        G = nx.star_graph(2)
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight=None) == 0.25
+
+    def test_undirected_weighted_star(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight="weight") == 0.375
+
+    def test_cycle_directed_unweighted(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(2, 1)
+        assert nx.global_reaching_centrality(G, weight=None) == 0
+
+    def test_cycle_undirected_unweighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2)
+        assert nx.global_reaching_centrality(G, weight=None) == 0
+
+    def test_cycle_directed_weighted(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 2, 1), (2, 1, 1)])
+        assert nx.global_reaching_centrality(G) == 0
+
+    def test_cycle_undirected_weighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=1)
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False) == 0
+
+    def test_directed_weighted(self):
+        G = nx.DiGraph()
+        G.add_edge("A", "B", weight=5)
+        G.add_edge("B", "C", weight=1)
+        G.add_edge("B", "D", weight=0.25)
+        G.add_edge("D", "E", weight=1)
+
+        denom = len(G) - 1
+        A_local = sum([5, 3, 2.625, 2.0833333333333]) / denom
+        B_local = sum([1, 0.25, 0.625]) / denom
+        C_local = 0
+        D_local = sum([1]) / denom
+        E_local = 0
+
+        local_reach_ctrs = [A_local, C_local, B_local, D_local, E_local]
+        max_local = max(local_reach_ctrs)
+        expected = sum(max_local - lrc for lrc in local_reach_ctrs) / denom
+        grc = nx.global_reaching_centrality
+        actual = grc(G, normalized=False, weight="weight")
+        assert expected == pytest.approx(actual, abs=1e-7)
+
+
+class TestLocalReachingCentrality:
+    """Unit tests for the local reaching centrality function."""
+
+    def test_non_positive_weights(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            G.add_weighted_edges_from([(0, 1, 0)])
+            nx.local_reaching_centrality(G, 0, weight="weight")
+
+    def test_negatively_weighted(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
+            nx.local_reaching_centrality(G, 0, weight="weight")
+
+    def test_undirected_unweighted_star(self):
+        G = nx.star_graph(2)
+        grc = nx.local_reaching_centrality
+        assert grc(G, 1, weight=None, normalized=False) == 0.75
+
+    def test_undirected_weighted_star(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        centrality = nx.local_reaching_centrality(
+            G, 1, normalized=False, weight="weight"
+        )
+        assert centrality == 1.5
+
+    def test_undirected_weighted_normalized(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        centrality = nx.local_reaching_centrality(
+            G, 1, normalized=True, weight="weight"
+        )
+        assert centrality == 1.0
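Both reaching-centrality functions exercised above accept optional weight and normalized arguments. A minimal sketch of their behaviour on a small directed star, using only values implied by the tests (assuming a current networkx release):

import networkx as nx

# Global reaching centrality compares each node's "reach" against the best node's
# reach: 1 for a perfect hierarchy (a star rooted at one node), 0 for a cycle.
G = nx.DiGraph([(1, 2), (1, 3)])
assert nx.global_reaching_centrality(G) == 1
assert nx.global_reaching_centrality(nx.DiGraph([(1, 2), (2, 1)])) == 0

# Local reaching centrality of the hub: it reaches every other node in one step.
assert nx.local_reaching_centrality(G, 1) == 1.0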
venv/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py
ADDED
@@ -0,0 +1,82 @@
+"""
+Tests for second order centrality.
+"""
+
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+
+
+def test_empty():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.empty_graph()
+        nx.second_order_centrality(G)
+
+
+def test_non_connected():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.Graph()
+        G.add_node(0)
+        G.add_node(1)
+        nx.second_order_centrality(G)
+
+
+def test_non_negative_edge_weights():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.path_graph(2)
+        G.add_edge(0, 1, weight=-1)
+        nx.second_order_centrality(G)
+
+
+def test_weight_attribute():
+    G = nx.Graph()
+    G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 3.5)], weight="w")
+    expected = {0: 3.431, 1: 3.082, 2: 5.612}
+    b = nx.second_order_centrality(G, weight="w")
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(expected[n], abs=1e-2)
+
+
+def test_one_node_graph():
+    """Second order centrality: single node"""
+    G = nx.Graph()
+    G.add_node(0)
+    G.add_edge(0, 0)
+    assert nx.second_order_centrality(G)[0] == 0
+
+
+def test_P3():
+    """Second order centrality: line graph, as defined in paper"""
+    G = nx.path_graph(3)
+    b_answer = {0: 3.741, 1: 1.414, 2: 3.741}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
+
+
+def test_K3():
+    """Second order centrality: complete graph, as defined in paper"""
+    G = nx.complete_graph(3)
+    b_answer = {0: 1.414, 1: 1.414, 2: 1.414}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
+
+
+def test_ring_graph():
+    """Second order centrality: ring graph, as defined in paper"""
+    G = nx.cycle_graph(5)
+    b_answer = {0: 4.472, 1: 4.472, 2: 4.472, 3: 4.472, 4: 4.472}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
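As the P3 values above suggest, second order centrality is smallest for the most central node. A minimal sketch of the dictionary it returns (assuming numpy and scipy are installed, since the test module skips without them):

import networkx as nx

# Second order centrality: the standard deviation of the return times of a
# perpetual random walk on the graph; lower values indicate more central nodes.
G = nx.path_graph(3)
soc = nx.second_order_centrality(G)
assert min(soc, key=soc.get) == 1   # the middle of the path is the most central node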