diff --git a/ckpts/universal/global_step80/zero/15.input_layernorm.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/15.input_layernorm.weight/exp_avg.pt new file mode 100644 index 0000000000000000000000000000000000000000..ab738599607e8279705b8c7272105fd2889fbc62 --- /dev/null +++ b/ckpts/universal/global_step80/zero/15.input_layernorm.weight/exp_avg.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d03f5cc7d1db3d15cf919ab6fba361ee207dd5c5712b31271caf19bc2d00e779 +size 9372 diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e39dc00aa250b05cbd8f0ce9b38cf32ecc752946 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/__init__.py @@ -0,0 +1,24 @@ +"""Approximations of graph properties and Heuristic methods for optimization. + +The functions in this class are not imported into the top-level ``networkx`` +namespace so the easiest way to use them is with:: + + >>> from networkx.algorithms import approximation + +Another option is to import the specific function with +``from networkx.algorithms.approximation import function_name``. + +""" +from networkx.algorithms.approximation.clustering_coefficient import * +from networkx.algorithms.approximation.clique import * +from networkx.algorithms.approximation.connectivity import * +from networkx.algorithms.approximation.distance_measures import * +from networkx.algorithms.approximation.dominating_set import * +from networkx.algorithms.approximation.kcomponents import * +from networkx.algorithms.approximation.matching import * +from networkx.algorithms.approximation.ramsey import * +from networkx.algorithms.approximation.steinertree import * +from networkx.algorithms.approximation.traveling_salesman import * +from networkx.algorithms.approximation.treewidth import * +from networkx.algorithms.approximation.vertex_cover import * +from networkx.algorithms.approximation.maxcut import * diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py new file mode 100644 index 0000000000000000000000000000000000000000..56443068633f80fa4223afa9ceb88f64c1614faa --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/clique.py @@ -0,0 +1,258 @@ +"""Functions for computing large cliques and maximum independent sets.""" +import networkx as nx +from networkx.algorithms.approximation import ramsey +from networkx.utils import not_implemented_for + +__all__ = [ + "clique_removal", + "max_clique", + "large_clique_size", + "maximum_independent_set", +] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def maximum_independent_set(G): + """Returns an approximate maximum independent set. + + Independent set or stable set is a set of vertices in a graph, no two of + which are adjacent. That is, it is a set I of vertices such that for every + two vertices in I, there is no edge connecting the two. Equivalently, each + edge in the graph has at most one endpoint in I. The size of an independent + set is the number of vertices it contains [1]_. + + A maximum independent set is a largest independent set for a given graph G + and its size is denoted $\\alpha(G)$. 
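As a minimal doctest-style sketch of the definition just given (assuming `import networkx as nx`, and reusing the path graph from the example further below), every pair of nodes drawn from the returned set is non-adjacent:

>>> G = nx.path_graph(10)
>>> iset = nx.approximation.maximum_independent_set(G)
>>> all(not G.has_edge(u, v) for u in iset for v in iset)
True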
The problem of finding such a set is called + the maximum independent set problem and is an NP-hard optimization problem. + As such, it is unlikely that there exists an efficient algorithm for finding + a maximum independent set of a graph. + + The Independent Set algorithm is based on [2]_. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + iset : Set + The apx-maximum independent set + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.maximum_independent_set(G) + {0, 2, 4, 6, 9} + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case. + + References + ---------- + .. [1] `Wikipedia: Independent set + `_ + .. [2] Boppana, R., & Halldórsson, M. M. (1992). + Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer. + """ + iset, _ = clique_removal(G) + return iset + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def max_clique(G): + r"""Find the Maximum Clique + + Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set + in the worst case. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + clique : set + The apx-maximum clique of the graph + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.max_clique(G) + {8, 9} + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + A clique in an undirected graph G = (V, E) is a subset of the vertex set + `C \subseteq V` such that for every two vertices in C there exists an edge + connecting the two. This is equivalent to saying that the subgraph + induced by C is complete (in some cases, the term clique may also refer + to the subgraph). + + A maximum clique is a clique of the largest possible size in a given graph. + The clique number `\omega(G)` of a graph G is the number of + vertices in a maximum clique in G. The intersection number of + G is the smallest number of cliques that together cover all edges of G. + + https://en.wikipedia.org/wiki/Maximum_clique + + References + ---------- + .. [1] Boppana, R., & Halldórsson, M. M. (1992). + Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer. + doi:10.1007/BF01994876 + """ + # finding the maximum clique in a graph is equivalent to finding + # the independent set in the complementary graph + cgraph = nx.complement(G) + iset, _ = clique_removal(cgraph) + return iset + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def clique_removal(G): + r"""Repeatedly remove cliques from the graph. + + Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique + and independent set. Returns the largest independent set found, along + with found maximal cliques. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + max_ind_cliques : (set, list) tuple + 2-tuple of Maximal Independent Set and list of maximal cliques (sets). + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.clique_removal(G) + ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}]) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Boppana, R., & Halldórsson, M. M. (1992). 
+ Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer. + """ + graph = G.copy() + c_i, i_i = ramsey.ramsey_R2(graph) + cliques = [c_i] + isets = [i_i] + while graph: + graph.remove_nodes_from(c_i) + c_i, i_i = ramsey.ramsey_R2(graph) + if c_i: + cliques.append(c_i) + if i_i: + isets.append(i_i) + # Determine the largest independent set as measured by cardinality. + maxiset = max(isets, key=len) + return maxiset, cliques + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def large_clique_size(G): + """Find the size of a large clique in a graph. + + A *clique* is a subset of nodes in which each pair of nodes is + adjacent. This function is a heuristic for finding the size of a + large clique in the graph. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + k: integer + The size of a large clique in the graph. + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.large_clique_size(G) + 2 + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + This implementation is from [1]_. Its worst case time complexity is + :math:`O(n d^2)`, where *n* is the number of nodes in the graph and + *d* is the maximum degree. + + This function is a heuristic, which means it may work well in + practice, but there is no rigorous mathematical guarantee on the + ratio between the returned number and the actual largest clique size + in the graph. + + References + ---------- + .. [1] Pattabiraman, Bharath, et al. + "Fast Algorithms for the Maximum Clique Problem on Massive Graphs + with Applications to Overlapping Community Detection." + *Internet Mathematics* 11.4-5 (2015): 421--448. + + + See also + -------- + + :func:`networkx.algorithms.approximation.clique.max_clique` + A function that returns an approximate maximum clique with a + guarantee on the approximation ratio. + + :mod:`networkx.algorithms.clique` + Functions for finding the exact maximum clique in a graph. + + """ + degrees = G.degree + + def _clique_heuristic(G, U, size, best_size): + if not U: + return max(best_size, size) + u = max(U, key=degrees) + U.remove(u) + N_prime = {v for v in G[u] if degrees[v] >= best_size} + return _clique_heuristic(G, U & N_prime, size + 1, best_size) + + best_size = 0 + nodes = (u for u in G if degrees[u] >= best_size) + for u in nodes: + neighbors = {v for v in G[u] if degrees[v] >= best_size} + best_size = _clique_heuristic(G, neighbors, 1, best_size) + return best_size diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py new file mode 100644 index 0000000000000000000000000000000000000000..a2214ed128b808196f2700435f03679fc16a8aac --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/connectivity.py @@ -0,0 +1,412 @@ +""" Fast approximation for node connectivity +""" +import itertools +from operator import itemgetter + +import networkx as nx + +__all__ = [ + "local_node_connectivity", + "node_connectivity", + "all_pairs_node_connectivity", +] + + +@nx._dispatchable(name="approximate_local_node_connectivity") +def local_node_connectivity(G, source, target, cutoff=None): + """Compute node connectivity between source and target. 
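Stepping back briefly to `large_clique_size`, defined just above: a small sanity-check sketch in which the heuristic recovers the exact answer because the whole graph is itself a clique.

>>> G = nx.complete_graph(5)
>>> nx.approximation.large_clique_size(G)
5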
+
+    Pairwise or local node connectivity between two distinct and nonadjacent
+    nodes is the minimum number of nodes that must be removed (minimum
+    separating cutset) to disconnect them. By Menger's theorem, this is equal
+    to the number of node independent paths (paths that share no nodes other
+    than source and target), which is what this function computes.
+
+    This algorithm is a fast approximation that gives a strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
+    It works for both directed and undirected graphs.
+
+    Parameters
+    ----------
+
+    G : NetworkX graph
+
+    source : node
+        Starting node for node connectivity
+
+    target : node
+        Ending node for node connectivity
+
+    cutoff : integer
+        Maximum node connectivity to consider. If None, the minimum degree
+        of source or target is used as a cutoff. Default value None.
+
+    Returns
+    -------
+    k : integer
+        Pairwise node connectivity
+
+    Examples
+    --------
+    >>> # Platonic octahedral graph has node connectivity 4
+    >>> # for each non-adjacent node pair
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.octahedral_graph()
+    >>> approx.local_node_connectivity(G, 0, 5)
+    4
+
+    Notes
+    -----
+    This algorithm [1]_ finds node independent paths between two nodes by
+    computing their shortest path using BFS, marking the nodes of the path
+    found as 'used' and then searching other shortest paths excluding the
+    nodes marked as used until no more paths exist. It is not exact because
+    a shortest path could use nodes that, if the path were longer, may belong
+    to two different node independent paths. Thus it only guarantees a
+    strict lower bound on node connectivity.
+
+    Note that the authors propose a further refinement, losing accuracy and
+    gaining speed, which is not implemented yet.
+
+    See also
+    --------
+    all_pairs_node_connectivity
+    node_connectivity
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+       Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+       http://eclectic.ss.uci.edu/~drwhite/working.pdf
+
+    """
+    if target == source:
+        raise nx.NetworkXError("source and target have to be different nodes.")
+
+    # Maximum possible node independent paths
+    if G.is_directed():
+        possible = min(G.out_degree(source), G.in_degree(target))
+    else:
+        possible = min(G.degree(source), G.degree(target))
+
+    K = 0
+    if not possible:
+        return K
+
+    if cutoff is None:
+        cutoff = float("inf")
+
+    exclude = set()
+    for i in range(min(possible, cutoff)):
+        try:
+            path = _bidirectional_shortest_path(G, source, target, exclude)
+            exclude.update(set(path))
+            K += 1
+        except nx.NetworkXNoPath:
+            break
+
+    return K
+
+
+@nx._dispatchable(name="approximate_node_connectivity")
+def node_connectivity(G, s=None, t=None):
+    r"""Returns an approximation for node connectivity for a graph or digraph G.
+
+    Node connectivity is equal to the minimum number of nodes that
+    must be removed to disconnect G or render it trivial. By Menger's theorem,
+    this is equal to the number of node independent paths (paths that
+    share no nodes other than source and target).
+
+    If source and target nodes are provided, this function returns the
+    local node connectivity: the minimum number of nodes that must be
+    removed to break all paths from source to target in G.
+
+    This algorithm is based on a fast approximation that gives a strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
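The lower-bound guarantee can be exercised against the exact flow-based routine (a sketch; `nx.node_connectivity` is the exact algorithm, so the approximation can never exceed it):

>>> G = nx.icosahedral_graph()
>>> nx.approximation.node_connectivity(G) <= nx.node_connectivity(G)
True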
+    It works for both directed and undirected graphs.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    s : node
+        Source node. Optional. Default value: None.
+
+    t : node
+        Target node. Optional. Default value: None.
+
+    Returns
+    -------
+    K : integer
+        Node connectivity of G, or local node connectivity if source
+        and target are provided.
+
+    Examples
+    --------
+    >>> # Platonic octahedral graph is 4-node-connected
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.octahedral_graph()
+    >>> approx.node_connectivity(G)
+    4
+
+    Notes
+    -----
+    This algorithm [1]_ finds node independent paths between two nodes by
+    computing their shortest path using BFS, marking the nodes of the path
+    found as 'used' and then searching other shortest paths excluding the
+    nodes marked as used until no more paths exist. It is not exact because
+    a shortest path could use nodes that, if the path were longer, may belong
+    to two different node independent paths. Thus it only guarantees a
+    strict lower bound on node connectivity.
+
+    See also
+    --------
+    all_pairs_node_connectivity
+    local_node_connectivity
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+       Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+       http://eclectic.ss.uci.edu/~drwhite/working.pdf
+
+    """
+    if (s is not None and t is None) or (s is None and t is not None):
+        raise nx.NetworkXError("Both source and target must be specified.")
+
+    # Local node connectivity
+    if s is not None and t is not None:
+        if s not in G:
+            raise nx.NetworkXError(f"node {s} not in graph")
+        if t not in G:
+            raise nx.NetworkXError(f"node {t} not in graph")
+        return local_node_connectivity(G, s, t)
+
+    # Global node connectivity
+    if G.is_directed():
+        connected_func = nx.is_weakly_connected
+        iter_func = itertools.permutations
+
+        def neighbors(v):
+            return itertools.chain(G.predecessors(v), G.successors(v))
+
+    else:
+        connected_func = nx.is_connected
+        iter_func = itertools.combinations
+        neighbors = G.neighbors
+
+    if not connected_func(G):
+        return 0
+
+    # Choose a node with minimum degree
+    v, minimum_degree = min(G.degree(), key=itemgetter(1))
+    # Node connectivity is bounded by minimum degree
+    K = minimum_degree
+    # Compute local node connectivity with all non-neighbor nodes
+    # and store the minimum
+    for w in set(G) - set(neighbors(v)) - {v}:
+        K = min(K, local_node_connectivity(G, v, w, cutoff=K))
+    # Same for non-adjacent pairs of neighbors of v
+    for x, y in iter_func(neighbors(v), 2):
+        if y not in G[x] and x != y:
+            K = min(K, local_node_connectivity(G, x, y, cutoff=K))
+    return K
+
+
+@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
+def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
+    """Compute node connectivity between all pairs of nodes.
+
+    Pairwise or local node connectivity between two distinct and nonadjacent
+    nodes is the minimum number of nodes that must be removed (minimum
+    separating cutset) to disconnect them. By Menger's theorem, this is equal
+    to the number of node independent paths (paths that share no nodes other
+    than source and target), which is what this function computes.
+
+    This algorithm is a fast approximation that gives a strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
+    It works for both directed and undirected graphs.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nbunch : container
+        Container of nodes.
If provided node connectivity will be computed + only over pairs of nodes in nbunch. + + cutoff : integer + Maximum node connectivity to consider. If None, the minimum degree + of source or target is used as a cutoff in each pair of nodes. + Default value None. + + Returns + ------- + K : dictionary + Dictionary, keyed by source and target, of pairwise node connectivity + + Examples + -------- + A 3 node cycle with one extra node attached has connectivity 2 between all + nodes in the cycle and connectivity 1 between the extra node and the rest: + + >>> G = nx.cycle_graph(3) + >>> G.add_edge(2, 3) + >>> import pprint # for nice dictionary formatting + >>> pprint.pprint(nx.all_pairs_node_connectivity(G)) + {0: {1: 2, 2: 2, 3: 1}, + 1: {0: 2, 2: 2, 3: 1}, + 2: {0: 2, 1: 2, 3: 1}, + 3: {0: 1, 1: 1, 2: 1}} + + See Also + -------- + local_node_connectivity + node_connectivity + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + """ + if nbunch is None: + nbunch = G + else: + nbunch = set(nbunch) + + directed = G.is_directed() + if directed: + iter_func = itertools.permutations + else: + iter_func = itertools.combinations + + all_pairs = {n: {} for n in nbunch} + + for u, v in iter_func(nbunch, 2): + k = local_node_connectivity(G, u, v, cutoff=cutoff) + all_pairs[u][v] = k + if not directed: + all_pairs[v][u] = k + + return all_pairs + + +def _bidirectional_shortest_path(G, source, target, exclude): + """Returns shortest path between source and target ignoring nodes in the + container 'exclude'. + + Parameters + ---------- + + G : NetworkX graph + + source : node + Starting node for path + + target : node + Ending node for path + + exclude: container + Container for nodes to exclude from the search for shortest paths + + Returns + ------- + path: list + Shortest path between source and target ignoring nodes in 'exclude' + + Raises + ------ + NetworkXNoPath + If there is no path or if nodes are adjacent and have only one path + between them + + Notes + ----- + This function and its helper are originally from + networkx.algorithms.shortest_paths.unweighted and are modified to + accept the extra parameter 'exclude', which is a container for nodes + already used in other paths that should be ignored. + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. 
Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + + """ + # call helper to do the real work + results = _bidirectional_pred_succ(G, source, target, exclude) + pred, succ, w = results + + # build path from pred+w+succ + path = [] + # from source to w + while w is not None: + path.append(w) + w = pred[w] + path.reverse() + # from w to target + w = succ[path[-1]] + while w is not None: + path.append(w) + w = succ[w] + + return path + + +def _bidirectional_pred_succ(G, source, target, exclude): + # does BFS from both source and target and meets in the middle + # excludes nodes in the container "exclude" from the search + + # handle either directed or undirected + if G.is_directed(): + Gpred = G.predecessors + Gsucc = G.successors + else: + Gpred = G.neighbors + Gsucc = G.neighbors + + # predecessor and successors in search + pred = {source: None} + succ = {target: None} + + # initialize fringes, start with forward + forward_fringe = [source] + reverse_fringe = [target] + + level = 0 + + while forward_fringe and reverse_fringe: + # Make sure that we iterate one step forward and one step backwards + # thus source and target will only trigger "found path" when they are + # adjacent and then they can be safely included in the container 'exclude' + level += 1 + if level % 2 != 0: + this_level = forward_fringe + forward_fringe = [] + for v in this_level: + for w in Gsucc(v): + if w in exclude: + continue + if w not in pred: + forward_fringe.append(w) + pred[w] = v + if w in succ: + return pred, succ, w # found path + else: + this_level = reverse_fringe + reverse_fringe = [] + for v in this_level: + for w in Gpred(v): + if w in exclude: + continue + if w not in succ: + succ[w] = v + reverse_fringe.append(w) + if w in pred: + return pred, succ, w # found path + + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py new file mode 100644 index 0000000000000000000000000000000000000000..d5847e65a2a401cd607436297fe4c1bbc81db3d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/distance_measures.py @@ -0,0 +1,150 @@ +"""Distance measures approximated metrics.""" + +import networkx as nx +from networkx.utils.decorators import py_random_state + +__all__ = ["diameter"] + + +@py_random_state(1) +@nx._dispatchable(name="approximate_diameter") +def diameter(G, seed=None): + """Returns a lower bound on the diameter of the graph G. + + The function computes a lower bound on the diameter (i.e., the maximum eccentricity) + of a directed or undirected graph G. The procedure used varies depending on the graph + being directed or not. + + If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_. + The main idea is to pick the farthest node from a random node and return its eccentricity. + + Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_, + The procedure starts by selecting a random source node $s$ from which it performs a + forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and + backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using + a backward BFS and the forward eccentricity of $a_2$ using a forward BFS. + Finally, it returns the best lower bound between the two. 
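A short sketch of the bound just described (seeded for reproducibility; the approximate `nx.approximation.diameter` can only under-estimate the exact `nx.diameter`):

>>> G = nx.path_graph(10)
>>> nx.approximation.diameter(G, seed=42) <= nx.diameter(G)
True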
+ + In both cases, the time complexity is linear with respect to the size of G. + + Parameters + ---------- + G : NetworkX graph + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + d : integer + Lower Bound on the Diameter of G + + Examples + -------- + >>> G = nx.path_graph(10) # undirected graph + >>> nx.diameter(G) + 9 + >>> G = nx.cycle_graph(3, create_using=nx.DiGraph) # directed graph + >>> nx.diameter(G) + 2 + + Raises + ------ + NetworkXError + If the graph is empty or + If the graph is undirected and not connected or + If the graph is directed and not strongly connected. + + See Also + -------- + networkx.algorithms.distance_measures.diameter + + References + ---------- + .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib. + *Fast computation of empirically tight bounds for the diameter of massive graphs.* + Journal of Experimental Algorithmics (JEA), 2009. + https://arxiv.org/pdf/0904.2728.pdf + .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino. + *On computing the diameter of real-world directed (weighted) graphs.* + International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012. + https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf + """ + # if G is empty + if not G: + raise nx.NetworkXError("Expected non-empty NetworkX graph!") + # if there's only a node + if G.number_of_nodes() == 1: + return 0 + # if G is directed + if G.is_directed(): + return _two_sweep_directed(G, seed) + # else if G is undirected + return _two_sweep_undirected(G, seed) + + +def _two_sweep_undirected(G, seed): + """Helper function for finding a lower bound on the diameter + for undirected Graphs. + + The idea is to pick the farthest node from a random node + and return its eccentricity. + + ``G`` is a NetworkX undirected graph. + + .. note:: + + ``seed`` is a random.Random or numpy.random.RandomState instance + """ + # select a random source node + source = seed.choice(list(G)) + # get the distances to the other nodes + distances = nx.shortest_path_length(G, source) + # if some nodes have not been visited, then the graph is not connected + if len(distances) != len(G): + raise nx.NetworkXError("Graph not connected.") + # take a node that is (one of) the farthest nodes from the source + *_, node = distances + # return the eccentricity of the node + return nx.eccentricity(G, node) + + +def _two_sweep_directed(G, seed): + """Helper function for finding a lower bound on the diameter + for directed Graphs. + + It implements 2-dSweep, the directed version of the 2-sweep algorithm. + The algorithm follows the following steps. + 1. Select a source node $s$ at random. + 2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum + distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$. + 3. Perform a backward BFS from $s$ to select a node $a_2$ at the maximum + distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$. + 4. Return the maximum between $LB_1$ and $LB_2$. + + ``G`` is a NetworkX directed graph. + + .. 
note:: + + ``seed`` is a random.Random or numpy.random.RandomState instance + """ + # get a new digraph G' with the edges reversed in the opposite direction + G_reversed = G.reverse() + # select a random source node + source = seed.choice(list(G)) + # compute forward distances from source + forward_distances = nx.shortest_path_length(G, source) + # compute backward distances from source + backward_distances = nx.shortest_path_length(G_reversed, source) + # if either the source can't reach every node or not every node + # can reach the source, then the graph is not strongly connected + n = len(G) + if len(forward_distances) != n or len(backward_distances) != n: + raise nx.NetworkXError("DiGraph not strongly connected.") + # take a node a_1 at the maximum distance from the source in G + *_, a_1 = forward_distances + # take a node a_2 at the maximum distance from the source in G_reversed + *_, a_2 = backward_distances + # return the max between the backward eccentricity of a_1 and the forward eccentricity of a_2 + return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2)) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py new file mode 100644 index 0000000000000000000000000000000000000000..06ab97d97612bf6ecf093661648f06db14ada539 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/dominating_set.py @@ -0,0 +1,148 @@ +"""Functions for finding node and edge dominating sets. + +A `dominating set`_ for an undirected graph *G* with vertex set *V* +and edge set *E* is a subset *D* of *V* such that every vertex not in +*D* is adjacent to at least one member of *D*. An `edge dominating set`_ +is a subset *F* of *E* such that every edge not in *F* is +incident to an endpoint of at least one edge in *F*. + +.. _dominating set: https://en.wikipedia.org/wiki/Dominating_set +.. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set + +""" +import networkx as nx + +from ...utils import not_implemented_for +from ..matching import maximal_matching + +__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] + + +# TODO Why doesn't this algorithm work for directed graphs? +@not_implemented_for("directed") +@nx._dispatchable(node_attrs="weight") +def min_weighted_dominating_set(G, weight=None): + r"""Returns a dominating set that approximates the minimum weight node + dominating set. + + Parameters + ---------- + G : NetworkX graph + Undirected graph. + + weight : string + The node attribute storing the weight of an node. If provided, + the node attribute with this key must be a number for each + node. If not provided, each node is assumed to have weight one. + + Returns + ------- + min_weight_dominating_set : set + A set of nodes, the sum of whose weights is no more than `(\log + w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of + each node in the graph and `w(V^*)` denotes the sum of the + weights of each node in the minimum weight dominating set. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)]) + >>> nx.approximation.min_weighted_dominating_set(G) + {1, 2, 4} + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Notes + ----- + This algorithm computes an approximate minimum weighted dominating + set for the graph `G`. 
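A sketch checking the domination property of the result, reusing the graph from the example above (the logarithmic weight bound itself is not easily verified this way):

>>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
>>> D = nx.approximation.min_weighted_dominating_set(G)
>>> all(v in D or any(u in D for u in G[v]) for v in G)
True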
The returned solution has weight `(\log + w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each + node in the graph and `w(V^*)` denotes the sum of the weights of + each node in the minimum weight dominating set for the graph. + + This implementation of the algorithm runs in $O(m)$ time, where $m$ + is the number of edges in the graph. + + References + ---------- + .. [1] Vazirani, Vijay V. + *Approximation Algorithms*. + Springer Science & Business Media, 2001. + + """ + # The unique dominating set for the null graph is the empty set. + if len(G) == 0: + return set() + + # This is the dominating set that will eventually be returned. + dom_set = set() + + def _cost(node_and_neighborhood): + """Returns the cost-effectiveness of greedily choosing the given + node. + + `node_and_neighborhood` is a two-tuple comprising a node and its + closed neighborhood. + + """ + v, neighborhood = node_and_neighborhood + return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set) + + # This is a set of all vertices not already covered by the + # dominating set. + vertices = set(G) + # This is a dictionary mapping each node to the closed neighborhood + # of that node. + neighborhoods = {v: {v} | set(G[v]) for v in G} + + # Continue until all vertices are adjacent to some node in the + # dominating set. + while vertices: + # Find the most cost-effective node to add, along with its + # closed neighborhood. + dom_node, min_set = min(neighborhoods.items(), key=_cost) + # Add the node to the dominating set and reduce the remaining + # set of nodes to cover. + dom_set.add(dom_node) + del neighborhoods[dom_node] + vertices -= min_set + + return dom_set + + +@nx._dispatchable +def min_edge_dominating_set(G): + r"""Returns minimum cardinality edge dominating set. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + min_edge_dominating_set : set + Returns a set of dominating edges whose size is no more than 2 * OPT. + + Examples + -------- + >>> G = nx.petersen_graph() + >>> nx.approximation.min_edge_dominating_set(G) + {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)} + + Raises + ------ + ValueError + If the input graph `G` is empty. + + Notes + ----- + The algorithm computes an approximate solution to the edge dominating set + problem. The result is no more than 2 * OPT in terms of size of the set. + Runtime of the algorithm is $O(|E|)$. + """ + if not G: + raise ValueError("Expected non-empty NetworkX graph!") + return maximal_matching(G) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py new file mode 100644 index 0000000000000000000000000000000000000000..b540bd5f4a6fd8bc6e0b14744e562861731f5124 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py @@ -0,0 +1,369 @@ +""" Fast approximation for k-component structure +""" +import itertools +from collections import defaultdict +from collections.abc import Mapping +from functools import cached_property + +import networkx as nx +from networkx.algorithms.approximation import local_node_connectivity +from networkx.exception import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = ["k_components"] + + +@not_implemented_for("directed") +@nx._dispatchable(name="approximate_k_components") +def k_components(G, min_density=0.95): + r"""Returns the approximate k-component structure of a graph G. 
+ + A `k`-component is a maximal subgraph of a graph G that has, at least, + node connectivity `k`: we need to remove at least `k` nodes to break it + into more components. `k`-components have an inherent hierarchical + structure because they are nested in terms of connectivity: a connected + graph can contain several 2-components, each of which can contain + one or more 3-components, and so forth. + + This implementation is based on the fast heuristics to approximate + the `k`-component structure of a graph [1]_. Which, in turn, it is based on + a fast approximation algorithm for finding good lower bounds of the number + of node independent paths between two nodes [2]_. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + min_density : Float + Density relaxation threshold. Default value 0.95 + + Returns + ------- + k_components : dict + Dictionary with connectivity level `k` as key and a list of + sets of nodes that form a k-component of level `k` as values. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + >>> # Petersen graph has 10 nodes and it is triconnected, thus all + >>> # nodes are in a single component on all three connectivity levels + >>> from networkx.algorithms import approximation as apxa + >>> G = nx.petersen_graph() + >>> k_components = apxa.k_components(G) + + Notes + ----- + The logic of the approximation algorithm for computing the `k`-component + structure [1]_ is based on repeatedly applying simple and fast algorithms + for `k`-cores and biconnected components in order to narrow down the + number of pairs of nodes over which we have to compute White and Newman's + approximation algorithm for finding node independent paths [2]_. More + formally, this algorithm is based on Whitney's theorem, which states + an inclusion relation among node connectivity, edge connectivity, and + minimum degree for any graph G. This theorem implies that every + `k`-component is nested inside a `k`-edge-component, which in turn, + is contained in a `k`-core. Thus, this algorithm computes node independent + paths among pairs of nodes in each biconnected part of each `k`-core, + and repeats this procedure for each `k` from 3 to the maximal core number + of a node in the input graph. + + Because, in practice, many nodes of the core of level `k` inside a + bicomponent actually are part of a component of level k, the auxiliary + graph needed for the algorithm is likely to be very dense. Thus, we use + a complement graph data structure (see `AntiGraph`) to save memory. + AntiGraph only stores information of the edges that are *not* present + in the actual auxiliary graph. When applying algorithms to this + complement graph data structure, it behaves as if it were the dense + version. + + See also + -------- + k_components + + References + ---------- + .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion: + Visualization and Heuristics for Fast Computation. + https://arxiv.org/pdf/1503.04476v1 + + .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind + + .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness: + A hierarchical conception of social groups. + American Sociological Review 68(1), 103--28. 
+ https://doi.org/10.2307/3088904 + + """ + # Dictionary with connectivity level (k) as keys and a list of + # sets of nodes that form a k-component as values + k_components = defaultdict(list) + # make a few functions local for speed + node_connectivity = local_node_connectivity + k_core = nx.k_core + core_number = nx.core_number + biconnected_components = nx.biconnected_components + combinations = itertools.combinations + # Exact solution for k = {1,2} + # There is a linear time algorithm for triconnectivity, if we had an + # implementation available we could start from k = 4. + for component in nx.connected_components(G): + # isolated nodes have connectivity 0 + comp = set(component) + if len(comp) > 1: + k_components[1].append(comp) + for bicomponent in nx.biconnected_components(G): + # avoid considering dyads as bicomponents + bicomp = set(bicomponent) + if len(bicomp) > 2: + k_components[2].append(bicomp) + # There is no k-component of k > maximum core number + # \kappa(G) <= \lambda(G) <= \delta(G) + g_cnumber = core_number(G) + max_core = max(g_cnumber.values()) + for k in range(3, max_core + 1): + C = k_core(G, k, core_number=g_cnumber) + for nodes in biconnected_components(C): + # Build a subgraph SG induced by the nodes that are part of + # each biconnected component of the k-core subgraph C. + if len(nodes) < k: + continue + SG = G.subgraph(nodes) + # Build auxiliary graph + H = _AntiGraph() + H.add_nodes_from(SG.nodes()) + for u, v in combinations(SG, 2): + K = node_connectivity(SG, u, v, cutoff=k) + if k > K: + H.add_edge(u, v) + for h_nodes in biconnected_components(H): + if len(h_nodes) <= k: + continue + SH = H.subgraph(h_nodes) + for Gc in _cliques_heuristic(SG, SH, k, min_density): + for k_nodes in biconnected_components(Gc): + Gk = nx.k_core(SG.subgraph(k_nodes), k) + if len(Gk) <= k: + continue + k_components[k].append(set(Gk)) + return k_components + + +def _cliques_heuristic(G, H, k, min_density): + h_cnumber = nx.core_number(H) + for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)): + cands = {n for n, c in h_cnumber.items() if c == c_value} + # Skip checking for overlap for the highest core value + if i == 0: + overlap = False + else: + overlap = set.intersection( + *[{x for x in H[n] if x not in cands} for n in cands] + ) + if overlap and len(overlap) < k: + SH = H.subgraph(cands | overlap) + else: + SH = H.subgraph(cands) + sh_cnumber = nx.core_number(SH) + SG = nx.k_core(G.subgraph(SH), k) + while not (_same(sh_cnumber) and nx.density(SH) >= min_density): + # This subgraph must be writable => .copy() + SH = H.subgraph(SG).copy() + if len(SH) <= k: + break + sh_cnumber = nx.core_number(SH) + sh_deg = dict(SH.degree()) + min_deg = min(sh_deg.values()) + SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg) + SG = nx.k_core(G.subgraph(SH), k) + else: + yield SG + + +def _same(measure, tol=0): + vals = set(measure.values()) + if (max(vals) - min(vals)) <= tol: + return True + return False + + +class _AntiGraph(nx.Graph): + """ + Class for complement graphs. + + The main goal is to be able to work with big and dense graphs with + a low memory footprint. + + In this class you add the edges that *do not exist* in the dense graph, + the report methods of the class return the neighbors, the edges and + the degree as if it was the dense graph. Thus it's possible to use + an instance of this class with some of NetworkX functions. In this + case we only use k-core, connected_components, and biconnected_components. 
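The complement-neighbor bookkeeping can be illustrated with the ordinary dense complement (a sketch; `_AntiGraph` reports exactly these neighbors without ever materializing the complement graph):

>>> G = nx.path_graph(4)
>>> H = nx.complement(G)
>>> sorted(H[0])
[2, 3]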
+ """ + + all_edge_dict = {"weight": 1} + + def single_edge_dict(self): + return self.all_edge_dict + + edge_attr_dict_factory = single_edge_dict # type: ignore[assignment] + + def __getitem__(self, n): + """Returns a dict of neighbors of node n in the dense graph. + + Parameters + ---------- + n : node + A node in the graph. + + Returns + ------- + adj_dict : dictionary + The adjacency dictionary for nodes connected to n. + + """ + all_edge_dict = self.all_edge_dict + return { + node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n} + } + + def neighbors(self, n): + """Returns an iterator over all neighbors of node n in the + dense graph. + """ + try: + return iter(set(self._adj) - set(self._adj[n]) - {n}) + except KeyError as err: + raise NetworkXError(f"The node {n} is not in the graph.") from err + + class AntiAtlasView(Mapping): + """An adjacency inner dict for AntiGraph""" + + def __init__(self, graph, node): + self._graph = graph + self._atlas = graph._adj[node] + self._node = node + + def __len__(self): + return len(self._graph) - len(self._atlas) - 1 + + def __iter__(self): + return (n for n in self._graph if n not in self._atlas and n != self._node) + + def __getitem__(self, nbr): + nbrs = set(self._graph._adj) - set(self._atlas) - {self._node} + if nbr in nbrs: + return self._graph.all_edge_dict + raise KeyError(nbr) + + class AntiAdjacencyView(AntiAtlasView): + """An adjacency outer dict for AntiGraph""" + + def __init__(self, graph): + self._graph = graph + self._atlas = graph._adj + + def __len__(self): + return len(self._atlas) + + def __iter__(self): + return iter(self._graph) + + def __getitem__(self, node): + if node not in self._graph: + raise KeyError(node) + return self._graph.AntiAtlasView(self._graph, node) + + @cached_property + def adj(self): + return self.AntiAdjacencyView(self) + + def subgraph(self, nodes): + """This subgraph method returns a full AntiGraph. Not a View""" + nodes = set(nodes) + G = _AntiGraph() + G.add_nodes_from(nodes) + for n in G: + Gnbrs = G.adjlist_inner_dict_factory() + G._adj[n] = Gnbrs + for nbr, d in self._adj[n].items(): + if nbr in G._adj: + Gnbrs[nbr] = d + G._adj[nbr][n] = d + G.graph = self.graph + return G + + class AntiDegreeView(nx.reportviews.DegreeView): + def __iter__(self): + all_nodes = set(self._succ) + for n in self._nodes: + nbrs = all_nodes - set(self._succ[n]) - {n} + yield (n, len(nbrs)) + + def __getitem__(self, n): + nbrs = set(self._succ) - set(self._succ[n]) - {n} + # AntiGraph is a ThinGraph so all edges have weight 1 + return len(nbrs) + (n in nbrs) + + @cached_property + def degree(self): + """Returns an iterator for (node, degree) and degree for single node. + + The node degree is the number of edges adjacent to the node. + + Parameters + ---------- + nbunch : iterable container, optional (default=all nodes) + A container of nodes. The container will be iterated + through once. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + deg: + Degree of the node, if a single node is passed as argument. + nd_iter : an iterator + The iterator returns two-tuples of (node, degree). 
+ + See Also + -------- + degree + + Examples + -------- + >>> G = nx.path_graph(4) + >>> G.degree(0) # node 0 with degree 1 + 1 + >>> list(G.degree([0, 1])) + [(0, 1), (1, 2)] + + """ + return self.AntiDegreeView(self) + + def adjacency(self): + """Returns an iterator of (node, adjacency set) tuples for all nodes + in the dense graph. + + This is the fastest way to look at every edge. + For directed graphs, only outgoing adjacencies are included. + + Returns + ------- + adj_iter : iterator + An iterator of (node, adjacency set) for all nodes in + the graph. + + """ + for n in self._adj: + yield (n, set(self._adj) - set(self._adj[n]) - {n}) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..3a7c8a39b2e2884ac5a8f1ff8ca795e7c7bdb73c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/matching.py @@ -0,0 +1,43 @@ +""" +************** +Graph Matching +************** + +Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent +edges; that is, no two edges share a common vertex. + +`Wikipedia: Matching `_ +""" +import networkx as nx + +__all__ = ["min_maximal_matching"] + + +@nx._dispatchable +def min_maximal_matching(G): + r"""Returns the minimum maximal matching of G. That is, out of all maximal + matchings of the graph G, the smallest is returned. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + min_maximal_matching : set + Returns a set of edges such that no two edges share a common endpoint + and every edge not in the set shares some common endpoint in the set. + Cardinality will be 2*OPT in the worst case. + + Notes + ----- + The algorithm computes an approximate solution for the minimum maximal + cardinality matching problem. The solution is no more than 2 * OPT in size. + Runtime is $O(|E|)$. + + References + ---------- + .. [1] Vazirani, Vijay Approximation Algorithms (2001) + """ + return nx.maximal_matching(G) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py new file mode 100644 index 0000000000000000000000000000000000000000..f4e1da87c35ab821f4b3d0851bba19d599d8fa6a --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/maxcut.py @@ -0,0 +1,143 @@ +import networkx as nx +from networkx.utils.decorators import not_implemented_for, py_random_state + +__all__ = ["randomized_partitioning", "one_exchange"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(1) +@nx._dispatchable(edge_attrs="weight") +def randomized_partitioning(G, seed=None, p=0.5, weight=None): + """Compute a random partitioning of the graph nodes and its cut value. + + A partitioning is calculated by observing each node + and deciding to add it to the partition with probability `p`, + returning a random cut and its corresponding value (the + sum of weights of edges connecting different partitions). + + Parameters + ---------- + G : NetworkX graph + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + p : scalar + Probability for each node to be part of the first partition. + Should be in [0,1] + + weight : object + Edge attribute key to use as weight. 
If not specified, edges + have weight one. + + Returns + ------- + cut_size : scalar + Value of the minimum cut. + + partition : pair of node sets + A partitioning of the nodes that defines a minimum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1) + >>> cut_size + 6 + >>> partition + ({0, 3, 4}, {1, 2}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + """ + cut = {node for node in G.nodes() if seed.random() < p} + cut_size = nx.algorithms.cut_size(G, cut, weight=weight) + partition = (cut, G.nodes - cut) + return cut_size, partition + + +def _swap_node_partition(cut, node): + return cut - {node} if node in cut else cut.union({node}) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(2) +@nx._dispatchable(edge_attrs="weight") +def one_exchange(G, initial_cut=None, seed=None, weight=None): + """Compute a partitioning of the graphs nodes and the corresponding cut value. + + Use a greedy one exchange strategy to find a locally maximal cut + and its value, it works by finding the best node (one that gives + the highest gain to the cut value) to add to the current cut + and repeats this process until no improvement can be made. + + Parameters + ---------- + G : networkx Graph + Graph to find a maximum cut for. + + initial_cut : set + Cut to use as a starting point. If not supplied the algorithm + starts with an empty cut. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + cut_value : scalar + Value of the maximum cut. + + partition : pair of node sets + A partitioning of the nodes that defines a maximum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1) + >>> curr_cut_size + 6 + >>> partition + ({0, 2}, {1, 3, 4}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + """ + if initial_cut is None: + initial_cut = set() + cut = set(initial_cut) + current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight) + while True: + nodes = list(G.nodes()) + # Shuffling the nodes ensures random tie-breaks in the following call to max + seed.shuffle(nodes) + best_node_to_swap = max( + nodes, + key=lambda v: nx.algorithms.cut_size( + G, _swap_node_partition(cut, v), weight=weight + ), + default=None, + ) + potential_cut = _swap_node_partition(cut, best_node_to_swap) + potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight) + + if potential_cut_size > current_cut_size: + cut = potential_cut + current_cut_size = potential_cut_size + else: + break + + partition = (cut, G.nodes - cut) + return current_cut_size, partition diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py new file mode 100644 index 0000000000000000000000000000000000000000..5cb9fda04494718e1bd9d908d77e08a3a9ec0495 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/ramsey.py @@ -0,0 +1,52 @@ +""" +Ramsey numbers. 
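Before the Ramsey material, a sketch composing the two max-cut routines above (seeded; the local search starts from the random cut, so it can only match or improve its value):

>>> G = nx.complete_graph(5)
>>> rand_size, (side, _) = nx.approximation.randomized_partitioning(G, seed=1)
>>> best_size, _ = nx.approximation.one_exchange(G, initial_cut=side, seed=1)
>>> best_size >= rand_size
True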
+""" +import networkx as nx +from networkx.utils import not_implemented_for + +from ...utils import arbitrary_element + +__all__ = ["ramsey_R2"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def ramsey_R2(G): + r"""Compute the largest clique and largest independent set in `G`. + + This can be used to estimate bounds for the 2-color + Ramsey number `R(2;s,t)` for `G`. + + This is a recursive implementation which could run into trouble + for large recursions. Note that self-loop edges are ignored. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + max_pair : (set, set) tuple + Maximum clique, Maximum independent set. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + """ + if not G: + return set(), set() + + node = arbitrary_element(G) + nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node) + nnbrs = nx.non_neighbors(G, node) + c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy()) + c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy()) + + c_1.add(node) + i_2.add(node) + # Choose the larger of the two cliques and the larger of the two + # independent sets, according to cardinality. + return max(c_1, c_2, key=len), max(i_1, i_2, key=len) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py new file mode 100644 index 0000000000000000000000000000000000000000..2a31b72810c55c71946e074306ab1853d296aa47 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py @@ -0,0 +1,1498 @@ +""" +================================= +Travelling Salesman Problem (TSP) +================================= + +Implementation of approximate algorithms +for solving and approximating the TSP problem. + +Categories of algorithms which are implemented: + +- Christofides (provides a 3/2-approximation of TSP) +- Greedy +- Simulated Annealing (SA) +- Threshold Accepting (TA) +- Asadpour Asymmetric Traveling Salesman Algorithm + +The Travelling Salesman Problem tries to find, given the weight +(distance) between all points where a salesman has to visit, the +route so that: + +- The total distance (cost) which the salesman travels is minimized. +- The salesman returns to the starting point. +- Note that for a complete graph, the salesman visits each point once. + +The function `travelling_salesman_problem` allows for incomplete +graphs by finding all-pairs shortest paths, effectively converting +the problem to a complete graph problem. It calls one of the +approximate methods on that problem and then converts the result +back to the original graph using the previously found shortest paths. + +TSP is an NP-hard problem in combinatorial optimization, +important in operations research and theoretical computer science. + +http://en.wikipedia.org/wiki/Travelling_salesman_problem +""" +import math + +import networkx as nx +from networkx.algorithms.tree.mst import random_spanning_tree +from networkx.utils import not_implemented_for, pairwise, py_random_state + +__all__ = [ + "traveling_salesman_problem", + "christofides", + "asadpour_atsp", + "greedy_tsp", + "simulated_annealing_tsp", + "threshold_accepting_tsp", +] + + +def swap_two_nodes(soln, seed): + """Swap two nodes in `soln` to give a neighbor solution. 
+ + Parameters + ---------- + soln : list of nodes + Current cycle of nodes + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + list + The solution after move is applied. (A neighbor solution.) + + Notes + ----- + This function assumes that the incoming list `soln` is a cycle + (that the first and last element are the same) and also that + we don't want any move to change the first node in the list + (and thus not the last node either). + + The input list is changed as well as returned. Make a copy if needed. + + See Also + -------- + move_one_node + """ + a, b = seed.sample(range(1, len(soln) - 1), k=2) + soln[a], soln[b] = soln[b], soln[a] + return soln + + +def move_one_node(soln, seed): + """Move one node to another position to give a neighbor solution. + + The node to move and the position to move to are chosen randomly. + The first and last nodes are left untouched as soln must be a cycle + starting at that node. + + Parameters + ---------- + soln : list of nodes + Current cycle of nodes + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + list + The solution after move is applied. (A neighbor solution.) + + Notes + ----- + This function assumes that the incoming list `soln` is a cycle + (that the first and last element are the same) and also that + we don't want any move to change the first node in the list + (and thus not the last node either). + + The input list is changed as well as returned. Make a copy if needed. + + See Also + -------- + swap_two_nodes + """ + a, b = seed.sample(range(1, len(soln) - 1), k=2) + soln.insert(b, soln.pop(a)) + return soln + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def christofides(G, weight="weight", tree=None): + """Approximate a solution of the traveling salesman problem + + Compute a 3/2-approximation of the traveling salesman problem + in a complete undirected graph using Christofides [1]_ algorithm. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted undirected graph. + The distance between all pairs of nodes should be included. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + tree : NetworkX graph or None (default: None) + A minimum spanning tree of G. Or, if None, the minimum spanning + tree is computed using :func:`networkx.minimum_spanning_tree` + + Returns + ------- + list + List of nodes in `G` along a cycle with a 3/2-approximation of + the minimal Hamiltonian cycle. + + References + ---------- + .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for + the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ + Pittsburgh Pa Management Sciences Research Group, 1976. + """ + # Remove selfloops if necessary + loop_nodes = nx.nodes_with_selfloops(G) + try: + node = next(loop_nodes) + except StopIteration: + pass + else: + G = G.copy() + G.remove_edge(node, node) + G.remove_edges_from((n, n) for n in loop_nodes) + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. 
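    # Annotation (not part of the upstream source): in a complete graph each
    # node has exactly len(G) - 1 neighbors, which is what the adjacency-size
    # test below relies on; for example,
    #     all(len(nbrs) == len(G) - 1 for _, nbrs in nx.complete_graph(4).adj.items())
    # evaluates to True.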
+ if any(len(nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if tree is None: + tree = nx.minimum_spanning_tree(G, weight=weight) + L = G.copy() + L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)]) + MG = nx.MultiGraph() + MG.add_edges_from(tree.edges) + edges = nx.min_weight_matching(L, weight=weight) + MG.add_edges_from(edges) + return _shortcutting(nx.eulerian_circuit(MG)) + + +def _shortcutting(circuit): + """Remove duplicate nodes in the path""" + nodes = [] + for u, v in circuit: + if v in nodes: + continue + if not nodes: + nodes.append(u) + nodes.append(v) + nodes.append(nodes[0]) + return nodes + + +@nx._dispatchable(edge_attrs="weight") +def traveling_salesman_problem( + G, weight="weight", nodes=None, cycle=True, method=None, **kwargs +): + """Find the shortest path in `G` connecting specified nodes + + This function allows approximate solution to the traveling salesman + problem on networks that are not complete graphs and/or where the + salesman does not need to visit all nodes. + + This function proceeds in two steps. First, it creates a complete + graph using the all-pairs shortest_paths between nodes in `nodes`. + Edge weights in the new graph are the lengths of the paths + between each pair of nodes in the original graph. + Second, an algorithm (default: `christofides` for undirected and + `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian + cycle on this new graph. The available algorithms are: + + - christofides + - greedy_tsp + - simulated_annealing_tsp + - threshold_accepting_tsp + - asadpour_atsp + + Once the Hamiltonian Cycle is found, this function post-processes to + accommodate the structure of the original graph. If `cycle` is ``False``, + the biggest weight edge is removed to make a Hamiltonian path. + Then each edge on the new complete graph used for that analysis is + replaced by the shortest_path between those nodes on the original graph. + If the input graph `G` includes edges with weights that do not adhere to + the triangle inequality, such as when `G` is not a complete graph (i.e + length of non-existent edges is infinity), then the returned path may + contain some repeating nodes (other than the starting node). + + Parameters + ---------- + G : NetworkX graph + A possibly weighted graph + + nodes : collection of nodes (default=G.nodes) + collection (list, set, etc.) of nodes to visit + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + cycle : bool (default: True) + Indicates whether a cycle should be returned, or a path. + Note: the cycle is the approximate minimal cycle. + The path simply removes the biggest edge in that cycle. + + method : function (default: None) + A function that returns a cycle on all nodes and approximates + the solution to the traveling salesman problem on a complete + graph. The returned cycle is then used to find a corresponding + solution on `G`. `method` should be callable; take inputs + `G`, and `weight`; and return a list of nodes along the cycle. + + Provided options include :func:`christofides`, :func:`greedy_tsp`, + :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`. + + If `method is None`: use :func:`christofides` for undirected `G` and + :func:`asadpour_atsp` for directed `G`. + + **kwargs : dict + Other keyword arguments to be passed to the `method` function passed in. 
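+        For example, ``init_cycle="greedy"`` and ``temp=500`` are forwarded
+        unchanged when `method` is :func:`simulated_annealing_tsp`, as in
+        the last example below.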
+ + Returns + ------- + list + List of nodes in `G` along a path with an approximation of the minimal + path through `nodes`. + + Raises + ------ + NetworkXError + If `G` is a directed graph it has to be strongly connected or the + complete version cannot be generated. + + Examples + -------- + >>> tsp = nx.approximation.traveling_salesman_problem + >>> G = nx.cycle_graph(9) + >>> G[4][5]["weight"] = 5 # all other weights are 1 + >>> tsp(G, nodes=[3, 6]) + [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3] + >>> path = tsp(G, cycle=False) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + + While no longer required, you can still build (curry) your own function + to provide parameter values to the methods. + + >>> SA_tsp = nx.approximation.simulated_annealing_tsp + >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500) + >>> path = tsp(G, cycle=False, method=method) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + + Otherwise, pass other keyword arguments directly into the tsp function. + + >>> path = tsp( + ... G, + ... cycle=False, + ... method=nx.approximation.simulated_annealing_tsp, + ... init_cycle="greedy", + ... temp=500, + ... ) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + """ + if method is None: + if G.is_directed(): + method = asadpour_atsp + else: + method = christofides + if nodes is None: + nodes = list(G.nodes) + + dist = {} + path = {} + for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight): + dist[n] = d + path[n] = p + + if G.is_directed(): + # If the graph is not strongly connected, raise an exception + if not nx.is_strongly_connected(G): + raise nx.NetworkXError("G is not strongly connected") + GG = nx.DiGraph() + else: + GG = nx.Graph() + for u in nodes: + for v in nodes: + if u == v: + continue + GG.add_edge(u, v, weight=dist[u][v]) + + best_GG = method(GG, weight=weight, **kwargs) + + if not cycle: + # find and remove the biggest edge + (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]]) + pos = best_GG.index(u) + 1 + while best_GG[pos] != v: + pos = best_GG[pos:].index(u) + 1 + best_GG = best_GG[pos:-1] + best_GG[:pos] + + best_path = [] + for u, v in pairwise(best_GG): + best_path.extend(path[u][v][:-1]) + best_path.append(v) + return best_path + + +@not_implemented_for("undirected") +@py_random_state(2) +@nx._dispatchable(edge_attrs="weight", mutates_input=True) +def asadpour_atsp(G, weight="weight", seed=None, source=None): + """ + Returns an approximate solution to the traveling salesman problem. + + This approximate solution is one of the best known approximations for the + asymmetric traveling salesman problem developed by Asadpour et al, + [1]_. The algorithm first solves the Held-Karp relaxation to find a lower + bound for the weight of the cycle. Next, it constructs an exponential + distribution of undirected spanning trees where the probability of an + edge being in the tree corresponds to the weight of that edge using a + maximum entropy rounding scheme. Next we sample that distribution + $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the + direction of the arcs is added back to the edges. Finally, we augment + then short circuit that graph to find the approximate tour for the + salesman. + + Parameters + ---------- + G : nx.DiGraph + The graph should be a complete weighted directed graph. The + distance between all paris of nodes should be included and the triangle + inequality should hold. 
That is, the direct edge between any two nodes + should be the path of least cost. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + source : node label (default=`None`) + If given, return the cycle starting and ending at the given node. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman can follow to minimize + the total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete or has less than two nodes, the algorithm raises + an exception. + + NetworkXError + If `source` is not `None` and is not a node in `G`, the algorithm raises + an exception. + + NetworkXNotImplemented + If `G` is an undirected graph. + + References + ---------- + .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi, + An o(log n/log log n)-approximation algorithm for the asymmetric + traveling salesman problem, Operations research, 65 (2017), + pp. 1043–1061 + + Examples + -------- + >>> import networkx as nx + >>> import networkx.algorithms.approximation as approx + >>> G = nx.complete_graph(3, create_using=nx.DiGraph) + >>> nx.set_edge_attributes( + ... G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight" + ... ) + >>> tour = approx.asadpour_atsp(G, source=0) + >>> tour + [0, 2, 1, 0] + """ + from math import ceil, exp + from math import log as ln + + # Check that G is a complete graph + N = len(G) - 1 + if N < 2: + raise nx.NetworkXError("G must have at least two nodes") + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G is not a complete DiGraph") + # Check that the source vertex, if given, is in the graph + if source is not None and source not in G.nodes: + raise nx.NetworkXError("Given source node not in G.") + + opt_hk, z_star = held_karp_ascent(G, weight) + + # Test to see if the ascent method found an integer solution or a fractional + # solution. 
If it is integral then z_star is a nx.Graph, otherwise it is + # a dict + if not isinstance(z_star, dict): + # Here we are using the shortcutting method to go from the list of edges + # returned from eulerian_circuit to a list of nodes + return _shortcutting(nx.eulerian_circuit(z_star, source=source)) + + # Create the undirected support of z_star + z_support = nx.MultiGraph() + for u, v in z_star: + if (u, v) not in z_support.edges: + edge_weight = min(G[u][v][weight], G[v][u][weight]) + z_support.add_edge(u, v, **{weight: edge_weight}) + + # Create the exponential distribution of spanning trees + gamma = spanning_tree_distribution(z_support, z_star) + + # Write the lambda values to the edges of z_support + z_support = nx.Graph(z_support) + lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()} + nx.set_edge_attributes(z_support, lambda_dict, "weight") + del gamma, lambda_dict + + # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one + minimum_sampled_tree = None + minimum_sampled_tree_weight = math.inf + for _ in range(2 * ceil(ln(G.number_of_nodes()))): + sampled_tree = random_spanning_tree(z_support, "weight", seed=seed) + sampled_tree_weight = sampled_tree.size(weight) + if sampled_tree_weight < minimum_sampled_tree_weight: + minimum_sampled_tree = sampled_tree.copy() + minimum_sampled_tree_weight = sampled_tree_weight + + # Orient the edges in that tree to keep the cost of the tree the same. + t_star = nx.MultiDiGraph() + for u, v, d in minimum_sampled_tree.edges(data=weight): + if d == G[u][v][weight]: + t_star.add_edge(u, v, **{weight: d}) + else: + t_star.add_edge(v, u, **{weight: d}) + + # Find the node demands needed to neutralize the flow of t_star in G + node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star} + nx.set_node_attributes(G, node_demands, "demand") + + # Find the min_cost_flow + flow_dict = nx.min_cost_flow(G, "demand") + + # Build the flow into t_star + for source, values in flow_dict.items(): + for target in values: + if (source, target) not in t_star.edges and values[target] > 0: + # IF values[target] > 0 we have to add that many edges + for _ in range(values[target]): + t_star.add_edge(source, target) + + # Return the shortcut eulerian circuit + circuit = nx.eulerian_circuit(t_star, source=source) + return _shortcutting(circuit) + + +@nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True) +def held_karp_ascent(G, weight="weight"): + """ + Minimizes the Held-Karp relaxation of the TSP for `G` + + Solves the Held-Karp relaxation of the input complete digraph and scales + the output solution for use in the Asadpour [1]_ ASTP algorithm. + + The Held-Karp relaxation defines the lower bound for solutions to the + ATSP, although it does return a fractional solution. This is used in the + Asadpour algorithm as an initial solution which is later rounded to a + integral tree within the spanning tree polytopes. This function solves + the relaxation with the branch and bound method in [2]_. + + Parameters + ---------- + G : nx.DiGraph + The graph should be a complete weighted directed graph. + The distance between all paris of nodes should be included. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. 
+
+    Returns
+    -------
+    OPT : float
+        The cost for the optimal solution to the Held-Karp relaxation
+    z : dict or nx.Graph
+        A symmetrized and scaled version of the optimal solution to the
+        Held-Karp relaxation for use in the Asadpour algorithm.
+
+        If an integral solution is found, then that is an optimal solution for
+        the ATSP problem and that is returned instead.
+
+    References
+    ----------
+    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
+       An o(log n/log log n)-approximation algorithm for the asymmetric
+       traveling salesman problem, Operations research, 65 (2017),
+       pp. 1043–1061
+
+    .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+       spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+       pp. 1138-1162
+    """
+    import numpy as np
+    from scipy import optimize
+
+    def k_pi():
+        """
+        Find the set of minimum 1-Arborescences for G at point pi.
+
+        Returns
+        -------
+        Set
+            The set of minimum 1-Arborescences
+        """
+        # Create a copy of G without vertex 1.
+        G_1 = G.copy()
+        minimum_1_arborescences = set()
+        minimum_1_arborescence_weight = math.inf
+
+        # n plays the role of node '1' in the Held and Karp paper
+        n = next(G.__iter__())
+        G_1.remove_node(n)
+
+        # Iterate over the spanning arborescences of the graph until we know
+        # that we have found the minimum 1-arborescences. The strategy is to
+        # find the most expensive and the least expensive roots that 'node 1'
+        # can connect to. We then iterate over arborescences until the cost
+        # of the bare arborescence exceeds the cost of the minimum one plus
+        # the difference between the most and least expensive roots; past
+        # that point, connecting 'node 1' can, by definition, no longer
+        # produce a minimum 1-arborescence.
+        min_root = {"node": None, weight: math.inf}
+        max_root = {"node": None, weight: -math.inf}
+        for u, v, d in G.edges(n, data=True):
+            if d[weight] < min_root[weight]:
+                min_root = {"node": v, weight: d[weight]}
+            if d[weight] > max_root[weight]:
+                max_root = {"node": v, weight: d[weight]}
+
+        min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
+        min_root[weight] = min_root[weight] + min_in_edge[2][weight]
+        max_root[weight] = max_root[weight] + min_in_edge[2][weight]
+
+        min_arb_weight = math.inf
+        for arb in nx.ArborescenceIterator(G_1):
+            arb_weight = arb.size(weight)
+            if min_arb_weight == math.inf:
+                min_arb_weight = arb_weight
+            elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
+                break
+            # We have to pick the root node of the arborescence for the out
+            # edge of the first vertex as that is the only node without an
+            # edge directed into it.
+            for N, deg in arb.in_degree:
+                if deg == 0:
+                    # root found
+                    arb.add_edge(n, N, **{weight: G[n][N][weight]})
+                    arb_weight += G[n][N][weight]
+                    break
+
+            # We can pick the minimum weight in-edge for the vertex with
+            # a cycle. If there are multiple edges with the same minimum
+            # weight, we need to add all of them.
+            #
+            # Delete the edge (N, n) so that we cannot pick it.
+            edge_data = G[N][n]
+            G.remove_edge(N, n)
+            min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
+            min_edges = [
+                (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
+            ]
+            for u, v, d in min_edges:
+                new_arb = arb.copy()
+                new_arb.add_edge(u, v, **{weight: d})
+                new_arb_weight = arb_weight + d
+                # Check the weight of the arborescence: if it is a new
+                # minimum, clear all of the old candidate minimum
+                # 1-arborescences and keep this one as the only one.
+                # If its weight is above the known minimum, do not add it.
+                if new_arb_weight < minimum_1_arborescence_weight:
+                    minimum_1_arborescences.clear()
+                    minimum_1_arborescence_weight = new_arb_weight
+                # We have a 1-arborescence, add it to the set
+                if new_arb_weight == minimum_1_arborescence_weight:
+                    minimum_1_arborescences.add(new_arb)
+            G.add_edge(N, n, **edge_data)
+
+        return minimum_1_arborescences
+
+    def direction_of_ascent():
+        """
+        Find the direction of ascent at point pi.
+
+        See [1]_ for more information.
+
+        Returns
+        -------
+        dict
+            A mapping from the nodes of the graph representing the direction
+            of ascent.
+
+        References
+        ----------
+        .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+           pp. 1138-1162
+        """
+        # 1. Set d equal to the zero n-vector.
+        d = {}
+        for n in G:
+            d[n] = 0
+        del n
+        # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
+        minimum_1_arborescences = k_pi()
+        while True:
+            # Reduce K(pi) to K(pi, d).
+            # Find the arborescence in K(pi) which increases the least in
+            # direction d.
+            min_k_d_weight = math.inf
+            min_k_d = None
+            for arborescence in minimum_1_arborescences:
+                weighted_cost = 0
+                for n, deg in arborescence.degree:
+                    weighted_cost += d[n] * (deg - 2)
+                if weighted_cost < min_k_d_weight:
+                    min_k_d_weight = weighted_cost
+                    min_k_d = arborescence
+
+            # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
+            if min_k_d_weight > 0:
+                return d, min_k_d
+            # 4. d_i = d_i + v_{i, k}
+            for n, deg in min_k_d.degree:
+                d[n] += deg - 2
+            # Check that we do not need to terminate because the direction
+            # of ascent does not exist. This is done with linear
+            # programming.
+            c = np.full(len(minimum_1_arborescences), -1, dtype=int)
+            a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
+            b_eq = np.zeros(len(G) + 1, dtype=int)
+            b_eq[len(G)] = 1
+            for arb_count, arborescence in enumerate(minimum_1_arborescences):
+                n_count = len(G) - 1
+                for n, deg in arborescence.degree:
+                    a_eq[n_count][arb_count] = deg - 2
+                    n_count -= 1
+                a_eq[len(G)][arb_count] = 1
+            program_result = optimize.linprog(
+                c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
+            )
+            # If such a convex combination exists (the program is feasible),
+            # then no direction of ascent exists.
+            if program_result.success:
+                # There is no direction of ascent
+                return None, minimum_1_arborescences
+
+        # 5. GO TO 2
+
+    def find_epsilon(k, d):
+        """
+        Given the direction of ascent at pi, find the maximum distance we can go
+        in that direction.
+
+        Parameters
+        ----------
+        k : nx.DiGraph
+            The 1-arborescence with the minimum rate of increase in the
+            direction of ascent
+
+        d : dict
+            The direction of ascent
+
+        Returns
+        -------
+        float
+            The distance we can travel in direction `d`
+        """
+        min_epsilon = math.inf
+        for e_u, e_v, e_w in G.edges(data=weight):
+            if (e_u, e_v) in k.edges:
+                continue
+            # There is a condition which MUST be true for an edge to be a
+            # valid substitute: the substitute edge within k is the one
+            # directed into the same head vertex. This can be checked rather
+            # simply.
+            #
+            # Find the edge within k which is the substitute. Because k is a
+            # 1-arborescence, we know that there is exactly one such edge
+            # leading into every vertex.
+ if len(k.in_edges(e_v, data=weight)) > 1: + raise Exception + sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__()) + k.add_edge(e_u, e_v, **{weight: e_w}) + k.remove_edge(sub_u, sub_v) + if ( + max(d for n, d in k.in_degree()) <= 1 + and len(G) == k.number_of_edges() + and nx.is_weakly_connected(k) + ): + # Ascent method calculation + if d[sub_u] == d[e_u] or sub_w == e_w: + # Revert to the original graph + k.remove_edge(e_u, e_v) + k.add_edge(sub_u, sub_v, **{weight: sub_w}) + continue + epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u]) + if 0 < epsilon < min_epsilon: + min_epsilon = epsilon + # Revert to the original graph + k.remove_edge(e_u, e_v) + k.add_edge(sub_u, sub_v, **{weight: sub_w}) + + return min_epsilon + + # I have to know that the elements in pi correspond to the correct elements + # in the direction of ascent, even if the node labels are not integers. + # Thus, I will use dictionaries to made that mapping. + pi_dict = {} + for n in G: + pi_dict[n] = 0 + del n + original_edge_weights = {} + for u, v, d in G.edges(data=True): + original_edge_weights[(u, v)] = d[weight] + dir_ascent, k_d = direction_of_ascent() + while dir_ascent is not None: + max_distance = find_epsilon(k_d, dir_ascent) + for n, v in dir_ascent.items(): + pi_dict[n] += max_distance * v + for u, v, d in G.edges(data=True): + d[weight] = original_edge_weights[(u, v)] + pi_dict[u] + dir_ascent, k_d = direction_of_ascent() + nx._clear_cache(G) + # k_d is no longer an individual 1-arborescence but rather a set of + # minimal 1-arborescences at the maximum point of the polytope and should + # be reflected as such + k_max = k_d + + # Search for a cycle within k_max. If a cycle exists, return it as the + # solution + for k in k_max: + if len([n for n in k if k.degree(n) == 2]) == G.order(): + # Tour found + # TODO: this branch does not restore original_edge_weights of G! + return k.size(weight), k + + # Write the original edge weights back to G and every member of k_max at + # the maximum point. Also average the number of times that edge appears in + # the set of minimal 1-arborescences. + x_star = {} + size_k_max = len(k_max) + for u, v, d in G.edges(data=True): + edge_count = 0 + d[weight] = original_edge_weights[(u, v)] + for k in k_max: + if (u, v) in k.edges(): + edge_count += 1 + k[u][v][weight] = original_edge_weights[(u, v)] + x_star[(u, v)] = edge_count / size_k_max + # Now symmetrize the edges in x_star and scale them according to (5) in + # reference [1] + z_star = {} + scale_factor = (G.order() - 1) / G.order() + for u, v in x_star: + frequency = x_star[(u, v)] + x_star[(v, u)] + if frequency > 0: + z_star[(u, v)] = scale_factor * frequency + del x_star + # Return the optimal weight and the z dict + return next(k_max.__iter__()).size(weight), z_star + + +@nx._dispatchable +def spanning_tree_distribution(G, z): + """ + Find the asadpour exponential distribution of spanning trees. + + Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_ + using the approach in section 7 to build an exponential distribution of + undirected spanning trees. + + This algorithm ensures that the probability of any edge in a spanning + tree is proportional to the sum of the probabilities of the tress + containing that edge over the sum of the probabilities of all spanning + trees of the graph. + + Parameters + ---------- + G : nx.MultiGraph + The undirected support graph for the Held Karp relaxation + + z : dict + The output of `held_karp_ascent()`, a scaled version of the Held-Karp + solution. 
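+        Keys are edge tuples ``(u, v)`` and values are the symmetrized,
+        scaled Held-Karp marginals for those edges.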
+ + Returns + ------- + gamma : dict + The probability distribution which approximately preserves the marginal + probabilities of `z`. + """ + from math import exp + from math import log as ln + + def q(e): + """ + The value of q(e) is described in the Asadpour paper is "the + probability that edge e will be included in a spanning tree T that is + chosen with probability proportional to exp(gamma(T))" which + basically means that it is the total probability of the edge appearing + across the whole distribution. + + Parameters + ---------- + e : tuple + The `(u, v)` tuple describing the edge we are interested in + + Returns + ------- + float + The probability that a spanning tree chosen according to the + current values of gamma will include edge `e`. + """ + # Create the laplacian matrices + for u, v, d in G.edges(data=True): + d[lambda_key] = exp(gamma[(u, v)]) + G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key) + G_e = nx.contracted_edge(G, e, self_loops=False) + G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key) + + # Multiply by the weight of the contracted edge since it is not included + # in the total weight of the contracted graph. + return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff + + # initialize gamma to the zero dict + gamma = {} + for u, v, _ in G.edges: + gamma[(u, v)] = 0 + + # set epsilon + EPSILON = 0.2 + + # pick an edge attribute name that is unlikely to be in the graph + lambda_key = "spanning_tree_distribution's secret attribute name for lambda" + + while True: + # We need to know that know that no values of q_e are greater than + # (1 + epsilon) * z_e, however changing one gamma value can increase the + # value of a different q_e, so we have to complete the for loop without + # changing anything for the condition to be meet + in_range_count = 0 + # Search for an edge with q_e > (1 + epsilon) * z_e + for u, v in gamma: + e = (u, v) + q_e = q(e) + z_e = z[e] + if q_e > (1 + EPSILON) * z_e: + delta = ln( + (q_e * (1 - (1 + EPSILON / 2) * z_e)) + / ((1 - q_e) * (1 + EPSILON / 2) * z_e) + ) + gamma[e] -= delta + # Check that delta had the desired effect + new_q_e = q(e) + desired_q_e = (1 + EPSILON / 2) * z_e + if round(new_q_e, 8) != round(desired_q_e, 8): + raise nx.NetworkXError( + f"Unable to modify probability for edge ({u}, {v})" + ) + else: + in_range_count += 1 + # Check if the for loop terminated without changing any gamma + if in_range_count == len(gamma): + break + + # Remove the new edge attributes + for _, _, d in G.edges(data=True): + if lambda_key in d: + del d[lambda_key] + + return gamma + + +@nx._dispatchable(edge_attrs="weight") +def greedy_tsp(G, weight="weight", source=None): + """Return a low cost cycle starting at `source` and its cost. + + This approximates a solution to the traveling salesman problem. + It finds a cycle of all the nodes that a salesman can visit in order + to visit many nodes while minimizing total distance. + It uses a simple greedy algorithm. + In essence, this function returns a large cycle given a source point + for which the total cost of the cycle is minimized. + + Parameters + ---------- + G : Graph + The Graph should be a complete weighted undirected graph. + The distance between all pairs of nodes should be included. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + source : node, optional (default: first node in list(G)) + Starting node. 
If None, defaults to ``next(iter(G))`` + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete, the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) + >>> cycle = approx.greedy_tsp(G, source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + + Notes + ----- + This implementation of a greedy algorithm is based on the following: + + - The algorithm adds a node to the solution at every iteration. + - The algorithm selects a node not already in the cycle whose connection + to the previous node adds the least cost to the cycle. + + A greedy algorithm does not always give the best solution. + However, it can construct a first feasible solution which can + be passed as a parameter to an iterative improvement algorithm such + as Simulated Annealing, or Threshold Accepting. + + Time complexity: It has a running time $O(|V|^2)$ + """ + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if source is None: + source = nx.utils.arbitrary_element(G) + + if G.number_of_nodes() == 2: + neighbor = next(G.neighbors(source)) + return [source, neighbor, source] + + nodeset = set(G) + nodeset.remove(source) + cycle = [source] + next_node = source + while nodeset: + nbrdict = G[next_node] + next_node = min(nodeset, key=lambda n: nbrdict[n].get(weight, 1)) + cycle.append(next_node) + nodeset.remove(next_node) + cycle.append(cycle[0]) + return cycle + + +@py_random_state(9) +@nx._dispatchable(edge_attrs="weight") +def simulated_annealing_tsp( + G, + init_cycle, + weight="weight", + source=None, + temp=100, + move="1-1", + max_iterations=10, + N_inner=100, + alpha=0.01, + seed=None, +): + """Returns an approximate solution to the traveling salesman problem. + + This function uses simulated annealing to approximate the minimal cost + cycle through the nodes. Starting from a suboptimal solution, simulated + annealing perturbs that solution, occasionally accepting changes that make + the solution worse to escape from a locally optimal solution. The chance + of accepting such changes decreases over the iterations to encourage + an optimal result. In summary, the function returns a cycle starting + at `source` for which the total cost is minimized. It also returns the cost. + + The chance of accepting a proposed change is related to a parameter called + the temperature (annealing has a physical analogue of steel hardening + as it cools). As the temperature is reduced, the chance of moves that + increase cost goes down. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted graph. + The distance between all pairs of nodes should be included. + + init_cycle : list of all nodes or "greedy" + The initial solution (a cycle through all nodes returning to the start). 
+ This argument has no default to make you think about it. + If "greedy", use `greedy_tsp(G, weight)`. + Other common starting cycles are `list(G) + [next(iter(G))]` or the final + result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + source : node, optional (default: first node in list(G)) + Starting node. If None, defaults to ``next(iter(G))`` + + temp : int, optional (default=100) + The algorithm's temperature parameter. It represents the initial + value of temperature + + move : "1-1" or "1-0" or function, optional (default="1-1") + Indicator of what move to use when finding new trial solutions. + Strings indicate two special built-in moves: + + - "1-1": 1-1 exchange which transposes the position + of two elements of the current solution. + The function called is :func:`swap_two_nodes`. + For example if we apply 1-1 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can get the following by the transposition of 1 and 4 elements: + ``A' = [3, 2, 4, 1, 3]`` + - "1-0": 1-0 exchange which moves an node in the solution + to a new position. + The function called is :func:`move_one_node`. + For example if we apply 1-0 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can transfer the fourth element to the second position: + ``A' = [3, 4, 2, 1, 3]`` + + You may provide your own functions to enact a move from + one solution to a neighbor solution. The function must take + the solution as input along with a `seed` input to control + random number generation (see the `seed` input here). + Your function should maintain the solution as a cycle with + equal first and last node and all others appearing once. + Your function should return the new solution. + + max_iterations : int, optional (default=10) + Declared done when this number of consecutive iterations of + the outer loop occurs without any change in the best cost solution. + + N_inner : int, optional (default=100) + The number of iterations of the inner loop. + + alpha : float between (0, 1), optional (default=0.01) + Percentage of temperature decrease in each iteration + of outer loop + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... 
) + >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + >>> incycle = ["D", "B", "A", "C", "D"] + >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + + Notes + ----- + Simulated Annealing is a metaheuristic local search algorithm. + The main characteristic of this algorithm is that it accepts + even solutions which lead to the increase of the cost in order + to escape from low quality local optimal solutions. + + This algorithm needs an initial solution. If not provided, it is + constructed by a simple greedy algorithm. At every iteration, the + algorithm selects thoughtfully a neighbor solution. + Consider $c(x)$ cost of current solution and $c(x')$ cost of a + neighbor solution. + If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current + solution for the next iteration. Otherwise, the algorithm accepts + the neighbor solution with probability $p = exp - ([c(x') - c(x)] / temp)$. + Otherwise the current solution is retained. + + `temp` is a parameter of the algorithm and represents temperature. + + Time complexity: + For $N_i$ iterations of the inner loop and $N_o$ iterations of the + outer loop, this algorithm has running time $O(N_i * N_o * |V|)$. + + For more information and how the algorithm is inspired see: + http://en.wikipedia.org/wiki/Simulated_annealing + """ + if move == "1-1": + move = swap_two_nodes + elif move == "1-0": + move = move_one_node + if init_cycle == "greedy": + # Construct an initial solution using a greedy algorithm. + cycle = greedy_tsp(G, weight=weight, source=source) + if G.number_of_nodes() == 2: + return cycle + + else: + cycle = list(init_cycle) + if source is None: + source = cycle[0] + elif source != cycle[0]: + raise nx.NetworkXError("source must be first node in init_cycle") + if cycle[0] != cycle[-1]: + raise nx.NetworkXError("init_cycle must be a cycle. (return to start)") + + if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G): + raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.") + + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if G.number_of_nodes() == 2: + neighbor = next(G.neighbors(source)) + return [source, neighbor, source] + + # Find the cost of initial solution + cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle)) + + count = 0 + best_cycle = cycle.copy() + best_cost = cost + while count <= max_iterations and temp > 0: + count += 1 + for i in range(N_inner): + adj_sol = move(cycle, seed) + adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol)) + delta = adj_cost - cost + if delta <= 0: + # Set current solution the adjacent solution. + cycle = adj_sol + cost = adj_cost + + if cost < best_cost: + count = 0 + best_cycle = cycle.copy() + best_cost = cost + else: + # Accept even a worse solution with probability p. 
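+                # Here p = exp(-delta / temp), a Metropolis-style criterion:
+                # small cost increases and a high temperature make acceptance
+                # likely, while large increases at a low temperature are
+                # almost always rejected.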
+ p = math.exp(-delta / temp) + if p >= seed.random(): + cycle = adj_sol + cost = adj_cost + temp -= temp * alpha + + return best_cycle + + +@py_random_state(9) +@nx._dispatchable(edge_attrs="weight") +def threshold_accepting_tsp( + G, + init_cycle, + weight="weight", + source=None, + threshold=1, + move="1-1", + max_iterations=10, + N_inner=100, + alpha=0.1, + seed=None, +): + """Returns an approximate solution to the traveling salesman problem. + + This function uses threshold accepting methods to approximate the minimal cost + cycle through the nodes. Starting from a suboptimal solution, threshold + accepting methods perturb that solution, accepting any changes that make + the solution no worse than increasing by a threshold amount. Improvements + in cost are accepted, but so are changes leading to small increases in cost. + This allows the solution to leave suboptimal local minima in solution space. + The threshold is decreased slowly as iterations proceed helping to ensure + an optimum. In summary, the function returns a cycle starting at `source` + for which the total cost is minimized. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted graph. + The distance between all pairs of nodes should be included. + + init_cycle : list or "greedy" + The initial solution (a cycle through all nodes returning to the start). + This argument has no default to make you think about it. + If "greedy", use `greedy_tsp(G, weight)`. + Other common starting cycles are `list(G) + [next(iter(G))]` or the final + result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + source : node, optional (default: first node in list(G)) + Starting node. If None, defaults to ``next(iter(G))`` + + threshold : int, optional (default=1) + The algorithm's threshold parameter. It represents the initial + threshold's value + + move : "1-1" or "1-0" or function, optional (default="1-1") + Indicator of what move to use when finding new trial solutions. + Strings indicate two special built-in moves: + + - "1-1": 1-1 exchange which transposes the position + of two elements of the current solution. + The function called is :func:`swap_two_nodes`. + For example if we apply 1-1 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can get the following by the transposition of 1 and 4 elements: + ``A' = [3, 2, 4, 1, 3]`` + - "1-0": 1-0 exchange which moves an node in the solution + to a new position. + The function called is :func:`move_one_node`. + For example if we apply 1-0 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can transfer the fourth element to the second position: + ``A' = [3, 4, 2, 1, 3]`` + + You may provide your own functions to enact a move from + one solution to a neighbor solution. The function must take + the solution as input along with a `seed` input to control + random number generation (see the `seed` input here). + Your function should maintain the solution as a cycle with + equal first and last node and all others appearing once. + Your function should return the new solution. + + max_iterations : int, optional (default=10) + Declared done when this number of consecutive iterations of + the outer loop occurs without any change in the best cost solution. + + N_inner : int, optional (default=100) + The number of iterations of the inner loop. 
+ + alpha : float between (0, 1), optional (default=0.1) + Percentage of threshold decrease when there is at + least one acceptance of a neighbor solution. + If no inner loop moves are accepted the threshold remains unchanged. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) + >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + >>> incycle = ["D", "B", "A", "C", "D"] + >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + + Notes + ----- + Threshold Accepting is a metaheuristic local search algorithm. + The main characteristic of this algorithm is that it accepts + even solutions which lead to the increase of the cost in order + to escape from low quality local optimal solutions. + + This algorithm needs an initial solution. This solution can be + constructed by a simple greedy algorithm. At every iteration, it + selects thoughtfully a neighbor solution. + Consider $c(x)$ cost of current solution and $c(x')$ cost of + neighbor solution. + If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the current + solution for the next iteration, where the threshold is named threshold. + + In comparison to the Simulated Annealing algorithm, the Threshold + Accepting algorithm does not accept very low quality solutions + (due to the presence of the threshold value). In the case of + Simulated Annealing, even a very low quality solution can + be accepted with probability $p$. + + Time complexity: + It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number + of times the outer and inner loop run respectively. + + For more information and how algorithm is inspired see: + https://doi.org/10.1016/0021-9991(90)90201-B + + See Also + -------- + simulated_annealing_tsp + + """ + if move == "1-1": + move = swap_two_nodes + elif move == "1-0": + move = move_one_node + if init_cycle == "greedy": + # Construct an initial solution using a greedy algorithm. + cycle = greedy_tsp(G, weight=weight, source=source) + if G.number_of_nodes() == 2: + return cycle + + else: + cycle = list(init_cycle) + if source is None: + source = cycle[0] + elif source != cycle[0]: + raise nx.NetworkXError("source must be first node in init_cycle") + if cycle[0] != cycle[-1]: + raise nx.NetworkXError("init_cycle must be a cycle. 
(return to start)") + + if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G): + raise nx.NetworkXError("init_cycle is not all and only nodes.") + + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if G.number_of_nodes() == 2: + neighbor = list(G.neighbors(source))[0] + return [source, neighbor, source] + + # Find the cost of initial solution + cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle)) + + count = 0 + best_cycle = cycle.copy() + best_cost = cost + while count <= max_iterations: + count += 1 + accepted = False + for i in range(N_inner): + adj_sol = move(cycle, seed) + adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol)) + delta = adj_cost - cost + if delta <= threshold: + accepted = True + + # Set current solution the adjacent solution. + cycle = adj_sol + cost = adj_cost + + if cost < best_cost: + count = 0 + best_cycle = cycle.copy() + best_cost = cost + if accepted: + threshold -= threshold * alpha + + return best_cycle diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py new file mode 100644 index 0000000000000000000000000000000000000000..31d73f6368237c16ab6a66efee45e768ddfaef52 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/treewidth.py @@ -0,0 +1,252 @@ +"""Functions for computing treewidth decomposition. + +Treewidth of an undirected graph is a number associated with the graph. +It can be defined as the size of the largest vertex set (bag) in a tree +decomposition of the graph minus one. + +`Wikipedia: Treewidth `_ + +The notions of treewidth and tree decomposition have gained their +attractiveness partly because many graph and network problems that are +intractable (e.g., NP-hard) on arbitrary graphs become efficiently +solvable (e.g., with a linear time algorithm) when the treewidth of the +input graphs is bounded by a constant [1]_ [2]_. + +There are two different functions for computing a tree decomposition: +:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`. + +.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth + computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275. + http://dx.doi.org/10.1016/j.ic.2009.03.008 + +.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information + and Computing Sciences, Utrecht University. + Technical Report UU-CS-2005-018. + http://www.cs.uu.nl + +.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*. + https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf + +""" + +import itertools +import sys +from heapq import heapify, heappop, heappush + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(returns_graph=True) +def treewidth_min_degree(G): + """Returns a treewidth decomposition using the Minimum Degree heuristic. + + The heuristic chooses the nodes according to their degree, i.e., first + the node with the lowest degree is chosen, then the graph is updated + and the corresponding node is removed. 
Next, a new node with the lowest degree is chosen, and so on.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    Treewidth decomposition : (int, Graph) tuple
+        2-tuple with treewidth and the corresponding decomposed tree.
+    """
+    deg_heuristic = MinDegreeHeuristic(G)
+    return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(returns_graph=True)
+def treewidth_min_fill_in(G):
+    """Returns a treewidth decomposition using the Minimum Fill-in heuristic.
+
+    The heuristic chooses a node from the graph, where the number of edges
+    added when turning the neighborhood of the chosen node into a clique is
+    as small as possible.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    Treewidth decomposition : (int, Graph) tuple
+        2-tuple with treewidth and the corresponding decomposed tree.
+    """
+    return treewidth_decomp(G, min_fill_in_heuristic)
+
+
+class MinDegreeHeuristic:
+    """Implements the Minimum Degree heuristic.
+
+    The heuristic chooses the nodes according to their degree
+    (number of neighbors), i.e., first the node with the lowest degree is
+    chosen, then the graph is updated and the corresponding node is
+    removed. Next, a new node with the lowest degree is chosen, and so on.
+    """
+
+    def __init__(self, graph):
+        self._graph = graph
+
+        # nodes that have to be updated in the heap before each iteration
+        self._update_nodes = []
+
+        self._degreeq = []  # a heapq with 3-tuples (degree, unique_id, node)
+        self.count = itertools.count()
+
+        # build heap with initial degrees
+        for n in graph:
+            self._degreeq.append((len(graph[n]), next(self.count), n))
+        heapify(self._degreeq)
+
+    def best_node(self, graph):
+        # update nodes in self._update_nodes
+        for n in self._update_nodes:
+            # insert changed degrees into degreeq
+            heappush(self._degreeq, (len(graph[n]), next(self.count), n))
+
+        # get the next valid (minimum degree) node
+        while self._degreeq:
+            (min_degree, _, elim_node) = heappop(self._degreeq)
+            if elim_node not in graph or len(graph[elim_node]) != min_degree:
+                # outdated entry in degreeq
+                continue
+            elif min_degree == len(graph) - 1:
+                # fully connected: abort condition
+                return None
+
+            # remember to update nodes in the heap before getting the next node
+            self._update_nodes = graph[elim_node]
+            return elim_node
+
+        # the heap is empty: abort
+        return None
+
+
+def min_fill_in_heuristic(graph):
+    """Implements the Minimum Fill-in heuristic.
+
+    Returns the node from the graph, where the number of edges added when
+    turning the neighborhood of the chosen node into a clique is as small as
+    possible. This algorithm chooses the nodes using the Minimum Fill-In
+    heuristic.
The running time of the algorithm is :math:`O(V^3)` and it uses + additional constant memory.""" + + if len(graph) == 0: + return None + + min_fill_in_node = None + + min_fill_in = sys.maxsize + + # sort nodes by degree + nodes_by_degree = sorted(graph, key=lambda x: len(graph[x])) + min_degree = len(graph[nodes_by_degree[0]]) + + # abort condition (handle complete graph) + if min_degree == len(graph) - 1: + return None + + for node in nodes_by_degree: + num_fill_in = 0 + nbrs = graph[node] + for nbr in nbrs: + # count how many nodes in nbrs current nbr is not connected to + # subtract 1 for the node itself + num_fill_in += len(nbrs - graph[nbr]) - 1 + if num_fill_in >= 2 * min_fill_in: + break + + num_fill_in /= 2 # divide by 2 because of double counting + + if num_fill_in < min_fill_in: # update min-fill-in node + if num_fill_in == 0: + return node + min_fill_in = num_fill_in + min_fill_in_node = node + + return min_fill_in_node + + +@nx._dispatchable(returns_graph=True) +def treewidth_decomp(G, heuristic=min_fill_in_heuristic): + """Returns a treewidth decomposition using the passed heuristic. + + Parameters + ---------- + G : NetworkX graph + heuristic : heuristic function + + Returns + ------- + Treewidth decomposition : (int, Graph) tuple + 2-tuple with treewidth and the corresponding decomposed tree. + """ + + # make dict-of-sets structure + graph = {n: set(G[n]) - {n} for n in G} + + # stack containing nodes and neighbors in the order from the heuristic + node_stack = [] + + # get first node from heuristic + elim_node = heuristic(graph) + while elim_node is not None: + # connect all neighbors with each other + nbrs = graph[elim_node] + for u, v in itertools.permutations(nbrs, 2): + if v not in graph[u]: + graph[u].add(v) + + # push node and its current neighbors on stack + node_stack.append((elim_node, nbrs)) + + # remove node from graph + for u in graph[elim_node]: + graph[u].remove(elim_node) + + del graph[elim_node] + elim_node = heuristic(graph) + + # the abort condition is met; put all remaining nodes into one bag + decomp = nx.Graph() + first_bag = frozenset(graph.keys()) + decomp.add_node(first_bag) + + treewidth = len(first_bag) - 1 + + while node_stack: + # get node and its neighbors from the stack + (curr_node, nbrs) = node_stack.pop() + + # find a bag all neighbors are in + old_bag = None + for bag in decomp.nodes: + if nbrs <= bag: + old_bag = bag + break + + if old_bag is None: + # no old_bag was found: just connect to the first_bag + old_bag = first_bag + + # create new node for decomposition + nbrs.add(curr_node) + new_bag = frozenset(nbrs) + + # update treewidth + treewidth = max(treewidth, len(new_bag) - 1) + + # add edge to decomposition (implicitly also adds the new node) + decomp.add_edge(old_bag, new_bag) + + return treewidth, decomp diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py new file mode 100644 index 0000000000000000000000000000000000000000..c71399ebcc9a91a9aa5eead63a7c7307746ae06b --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/approximation/vertex_cover.py @@ -0,0 +1,82 @@ +"""Functions for computing an approximate minimum weight vertex cover. + +A |vertex cover|_ is a subset of nodes such that each edge in the graph +is incident to at least one node in the subset. + +.. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover +.. 
|vertex cover| replace:: *vertex cover* + +""" +import networkx as nx + +__all__ = ["min_weighted_vertex_cover"] + + +@nx._dispatchable(node_attrs="weight") +def min_weighted_vertex_cover(G, weight=None): + r"""Returns an approximate minimum weighted vertex cover. + + The set of nodes returned by this function is guaranteed to be a + vertex cover, and the total weight of the set is guaranteed to be at + most twice the total weight of the minimum weight vertex cover. In + other words, + + .. math:: + + w(S) \leq 2 * w(S^*), + + where $S$ is the vertex cover returned by this function, + $S^*$ is the vertex cover of minimum weight out of all vertex + covers of the graph, and $w$ is the function that computes the + sum of the weights of each node in that given set. + + Parameters + ---------- + G : NetworkX graph + + weight : string, optional (default = None) + If None, every node has weight 1. If a string, use this node + attribute as the node weight. A node without this attribute is + assumed to have weight 1. + + Returns + ------- + min_weighted_cover : set + Returns a set of nodes whose weight sum is no more than twice + the weight sum of the minimum weight vertex cover. + + Notes + ----- + For a directed graph, a vertex cover has the same definition: a set + of nodes such that each edge in the graph is incident to at least + one node in the set. Whether the node is the head or tail of the + directed edge is ignored. + + This is the local-ratio algorithm for computing an approximate + vertex cover. The algorithm greedily reduces the costs over edges, + iteratively building a cover. The worst-case runtime of this + implementation is $O(m \log n)$, where $n$ is the number + of nodes and $m$ the number of edges in the graph. + + References + ---------- + .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for + approximating the weighted vertex cover problem." + *Annals of Discrete Mathematics*, 25, 27–46 + + + """ + cost = dict(G.nodes(data=weight, default=1)) + # While there are uncovered edges, choose an uncovered and update + # the cost of the remaining edges. + cover = set() + for u, v in G.edges(): + if u in cover or v in cover: + continue + if cost[u] <= cost[v]: + cover.add(u) + cost[v] -= cost[u] + else: + cover.add(v) + cost[u] -= cost[v] + return cover diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/boundary.py b/venv/lib/python3.10/site-packages/networkx/algorithms/boundary.py new file mode 100644 index 0000000000000000000000000000000000000000..fef9ba223699b61ac4f26cb7b1152cea8238f899 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/boundary.py @@ -0,0 +1,167 @@ +"""Routines to find the boundary of a set of nodes. + +An edge boundary is a set of edges, each of which has exactly one +endpoint in a given set of nodes (or, in the case of directed graphs, +the set of edges whose source node is in the set). + +A node boundary of a set *S* of nodes is the set of (out-)neighbors of +nodes in *S* that are outside *S*. + +""" +from itertools import chain + +import networkx as nx + +__all__ = ["edge_boundary", "node_boundary"] + + +@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data") +def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): + """Returns the edge boundary of `nbunch1`. + + The *edge boundary* of a set *S* with respect to a set *T* is the + set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*. 
+ If *T* is not specified, it is assumed to be the set of all nodes + not in *S*. + + Parameters + ---------- + G : NetworkX graph + + nbunch1 : iterable + Iterable of nodes in the graph representing the set of nodes + whose edge boundary will be returned. (This is the set *S* from + the definition above.) + + nbunch2 : iterable + Iterable of nodes representing the target (or "exterior") set of + nodes. (This is the set *T* from the definition above.) If not + specified, this is assumed to be the set of all nodes in `G` + not in `nbunch1`. + + keys : bool + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + data : bool or object + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + default : object + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + Returns + ------- + iterator + An iterator over the edges in the boundary of `nbunch1` with + respect to `nbunch2`. If `keys`, `data`, or `default` + are specified and `G` is a multigraph, then edges are returned + with keys and/or data, as in :meth:`MultiGraph.edges`. + + Examples + -------- + >>> G = nx.wheel_graph(6) + + When nbunch2=None: + + >>> list(nx.edge_boundary(G, (1, 3))) + [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)] + + When nbunch2 is given: + + >>> list(nx.edge_boundary(G, (1, 3), (2, 0))) + [(1, 0), (1, 2), (3, 0), (3, 2)] + + Notes + ----- + Any element of `nbunch` that is not in the graph `G` will be + ignored. + + `nbunch1` and `nbunch2` are usually meant to be disjoint, but in + the interest of speed and generality, that is not required here. + + """ + nset1 = {n for n in nbunch1 if n in G} + # Here we create an iterator over edges incident to nodes in the set + # `nset1`. The `Graph.edges()` method does not provide a guarantee + # on the orientation of the edges, so our algorithm below must + # handle the case in which exactly one orientation, either (u, v) or + # (v, u), appears in this iterable. + if G.is_multigraph(): + edges = G.edges(nset1, data=data, keys=keys, default=default) + else: + edges = G.edges(nset1, data=data, default=default) + # If `nbunch2` is not provided, then it is assumed to be the set + # complement of `nbunch1`. For the sake of efficiency, this is + # implemented by using the `not in` operator, instead of by creating + # an additional set and using the `in` operator. + if nbunch2 is None: + return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1)) + nset2 = set(nbunch2) + return ( + e + for e in edges + if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2) + ) + + +@nx._dispatchable +def node_boundary(G, nbunch1, nbunch2=None): + """Returns the node boundary of `nbunch1`. + + The *node boundary* of a set *S* with respect to a set *T* is the + set of nodes *v* in *T* such that for some *u* in *S*, there is an + edge joining *u* to *v*. If *T* is not specified, it is assumed to + be the set of all nodes not in *S*. + + Parameters + ---------- + G : NetworkX graph + + nbunch1 : iterable + Iterable of nodes in the graph representing the set of nodes + whose node boundary will be returned. (This is the set *S* from + the definition above.) + + nbunch2 : iterable + Iterable of nodes representing the target (or "exterior") set of + nodes. (This is the set *T* from the definition above.) If not + specified, this is assumed to be the set of all nodes in `G` + not in `nbunch1`. + + Returns + ------- + set + The node boundary of `nbunch1` with respect to `nbunch2`. 
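+        The boundary is returned as a new ``set``; it is not a view, so
+        later changes to `G` are not reflected in it.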
+ + Examples + -------- + >>> G = nx.wheel_graph(6) + + When nbunch2=None: + + >>> list(nx.node_boundary(G, (3, 4))) + [0, 2, 5] + + When nbunch2 is given: + + >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5))) + [0, 5] + + Notes + ----- + Any element of `nbunch` that is not in the graph `G` will be + ignored. + + `nbunch1` and `nbunch2` are usually meant to be disjoint, but in + the interest of speed and generality, that is not required here. + + """ + nset1 = {n for n in nbunch1 if n in G} + bdy = set(chain.from_iterable(G[v] for v in nset1)) - nset1 + # If `nbunch2` is not specified, it is assumed to be the set + # complement of `nbunch1`. + if nbunch2 is not None: + bdy &= set(nbunch2) + return bdy diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/bridges.py b/venv/lib/python3.10/site-packages/networkx/algorithms/bridges.py new file mode 100644 index 0000000000000000000000000000000000000000..e076a256cb8c9b5431aea2e1bce8549b117e841b --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/bridges.py @@ -0,0 +1,205 @@ +"""Bridge-finding algorithms.""" +from itertools import chain + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["bridges", "has_bridges", "local_bridges"] + + +@not_implemented_for("directed") +@nx._dispatchable +def bridges(G, root=None): + """Generate all bridges in a graph. + + A *bridge* in a graph is an edge whose removal causes the number of + connected components of the graph to increase. Equivalently, a bridge is an + edge that does not belong to any cycle. Bridges are also known as cut-edges, + isthmuses, or cut arcs. + + Parameters + ---------- + G : undirected graph + + root : node (optional) + A node in the graph `G`. If specified, only the bridges in the + connected component containing this node will be returned. + + Yields + ------ + e : edge + An edge in the graph whose removal disconnects the graph (or + causes the number of connected components to increase). + + Raises + ------ + NodeNotFound + If `root` is not in the graph `G`. + + NetworkXNotImplemented + If `G` is a directed graph. + + Examples + -------- + The barbell graph with parameter zero has a single bridge: + + >>> G = nx.barbell_graph(10, 0) + >>> list(nx.bridges(G)) + [(9, 10)] + + Notes + ----- + This is an implementation of the algorithm described in [1]_. An edge is a + bridge if and only if it is not contained in any chain. Chains are found + using the :func:`networkx.chain_decomposition` function. + + The algorithm described in [1]_ requires a simple graph. If the provided + graph is a multigraph, we convert it to a simple graph and verify that any + bridges discovered by the chain decomposition algorithm are not multi-edges. + + Ignoring polylogarithmic factors, the worst-case time complexity is the + same as the :func:`networkx.chain_decomposition` function, + $O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is + the number of edges. + + References + ---------- + .. 
[1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions + """ + multigraph = G.is_multigraph() + H = nx.Graph(G) if multigraph else G + chains = nx.chain_decomposition(H, root=root) + chain_edges = set(chain.from_iterable(chains)) + H_copy = H.copy() + if root is not None: + H = H.subgraph(nx.node_connected_component(H, root)).copy() + for u, v in H.edges(): + if (u, v) not in chain_edges and (v, u) not in chain_edges: + if multigraph and len(G[u][v]) > 1: + continue + yield u, v + + +@not_implemented_for("directed") +@nx._dispatchable +def has_bridges(G, root=None): + """Decide whether a graph has any bridges. + + A *bridge* in a graph is an edge whose removal causes the number of + connected components of the graph to increase. + + Parameters + ---------- + G : undirected graph + + root : node (optional) + A node in the graph `G`. If specified, only the bridges in the + connected component containing this node will be considered. + + Returns + ------- + bool + Whether the graph (or the connected component containing `root`) + has any bridges. + + Raises + ------ + NodeNotFound + If `root` is not in the graph `G`. + + NetworkXNotImplemented + If `G` is a directed graph. + + Examples + -------- + The barbell graph with parameter zero has a single bridge:: + + >>> G = nx.barbell_graph(10, 0) + >>> nx.has_bridges(G) + True + + On the other hand, the cycle graph has no bridges:: + + >>> G = nx.cycle_graph(5) + >>> nx.has_bridges(G) + False + + Notes + ----- + This implementation uses the :func:`networkx.bridges` function, so + it shares its worst-case time complexity, $O(m + n)$, ignoring + polylogarithmic factors, where $n$ is the number of nodes in the + graph and $m$ is the number of edges. + + """ + try: + next(bridges(G, root=root)) + except StopIteration: + return False + else: + return True + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def local_bridges(G, with_span=True, weight=None): + """Iterate over local bridges of `G` optionally computing the span + + A *local bridge* is an edge whose endpoints have no common neighbors. + That is, the edge is not part of a triangle in the graph. + + The *span* of a *local bridge* is the shortest path length between + the endpoints if the local bridge is removed. + + Parameters + ---------- + G : undirected graph + + with_span : bool + If True, yield a 3-tuple `(u, v, span)` + + weight : function, string or None (default: None) + If function, used to compute edge weights for the span. + If string, the edge data attribute used in calculating span. + If None, all edges have weight 1. + + Yields + ------ + e : edge + The local bridges as an edge 2-tuple of nodes `(u, v)` or + as a 3-tuple `(u, v, span)` when `with_span is True`. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph or multigraph. + + Examples + -------- + A cycle graph has every edge a local bridge with span N-1. 
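+    (Removing any edge of the cycle leaves only the path the long way
+    around between its endpoints, so every span equals N-1.)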
+ + >>> G = nx.cycle_graph(9) + >>> (0, 8, 8) in set(nx.local_bridges(G)) + True + """ + if with_span is not True: + for u, v in G.edges: + if not (set(G[u]) & set(G[v])): + yield u, v + else: + wt = nx.weighted._weight_function(G, weight) + for u, v in G.edges: + if not (set(G[u]) & set(G[v])): + enodes = {u, v} + + def hide_edge(n, nbr, d): + if n not in enodes or nbr not in enodes: + return wt(n, nbr, d) + return None + + try: + span = nx.shortest_path_length(G, u, v, weight=hide_edge) + yield u, v, span + except nx.NetworkXNoPath: + yield u, v, float("inf") diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py b/venv/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py new file mode 100644 index 0000000000000000000000000000000000000000..9b362a0e1346c29f7207dc0afce392118daaeb2b --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py @@ -0,0 +1,155 @@ +"""Routines to calculate the broadcast time of certain graphs. + +Broadcasting is an information dissemination problem in which a node in a graph, +called the originator, must distribute a message to all other nodes by placing +a series of calls along the edges of the graph. Once informed, other nodes aid +the originator in distributing the message. + +The broadcasting must be completed as quickly as possible subject to the +following constraints: +- Each call requires one unit of time. +- A node can only participate in one call per unit of time. +- Each call only involves two adjacent nodes: a sender and a receiver. +""" + +import networkx as nx +from networkx import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = [ + "tree_broadcast_center", + "tree_broadcast_time", +] + + +def _get_max_broadcast_value(G, U, v, values): + adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True) + return max(values[u] + i for i, u in enumerate(adj, start=1)) + + +def _get_broadcast_centers(G, v, values, target): + adj = sorted(G.neighbors(v), key=values.get, reverse=True) + j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target) + return set([v] + adj[:j]) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def tree_broadcast_center(G): + """Return the Broadcast Center of the tree `G`. + + The broadcast center of a graph G denotes the set of nodes having + minimum broadcast time [1]_. This is a linear algorithm for determining + the broadcast center of a tree with ``N`` nodes, as a by-product it also + determines the broadcast time from the broadcast center. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + + Returns + ------- + BC : (int, set) tuple + minimum broadcast number of the tree, set of broadcast centers + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T, + Information dissemination in trees. SIAM J.Comput. 
10(4), 692–701 (1981)
+    """
+    # Verify that the graph G is a tree
+    if not nx.is_tree(G):
+        raise NetworkXError("Input graph is not a tree")
+    # step 0
+    if G.number_of_nodes() == 2:
+        return 1, set(G.nodes())
+    if G.number_of_nodes() == 1:
+        return 0, set(G.nodes())
+
+    # step 1
+    U = {node for node, deg in G.degree if deg == 1}
+    values = {n: 0 for n in U}
+    T = G.copy()
+    T.remove_nodes_from(U)
+
+    # step 2
+    W = {node for node, deg in T.degree if deg == 1}
+    values.update((w, G.degree[w] - 1) for w in W)
+
+    # step 3
+    while T.number_of_nodes() >= 2:
+        # step 4
+        w = min(W, key=lambda n: values[n])
+        v = next(T.neighbors(w))
+
+        # step 5
+        U.add(w)
+        W.remove(w)
+        T.remove_node(w)
+
+        # step 6
+        if T.degree(v) == 1:
+            # update t(v)
+            values.update({v: _get_max_broadcast_value(G, U, v, values)})
+            W.add(v)
+
+    # step 7
+    v = nx.utils.arbitrary_element(T)
+    b_T = _get_max_broadcast_value(G, U, v, values)
+    return b_T, _get_broadcast_centers(G, v, values, b_T)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def tree_broadcast_time(G, node=None):
+    """Return the Broadcast Time of the tree `G`.
+
+    The minimum broadcast time of a node is defined as the minimum amount
+    of time required to complete broadcasting starting from the
+    originator. The broadcast time of a graph is the maximum over
+    all nodes of the minimum broadcast time from that node [1]_.
+    This function returns the minimum broadcast time of `node`.
+    If `node` is None the broadcast time for the graph is returned.
+
+    Parameters
+    ----------
+    G : undirected graph
+        The graph should be an undirected tree
+    node: int, optional
+        index of starting node. If `None`, the algorithm returns the broadcast
+        time of the tree.
+
+    Returns
+    -------
+    BT : int
+        Broadcast Time of a node in a tree
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    References
+    ----------
+    .. [1] Harutyunyan, H. A. and Li, Z.
+        "A Simple Construction of Broadcast Graphs."
+        In Computing and Combinatorics. COCOON 2019
+        (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
+    """
+    b_T, b_C = tree_broadcast_center(G)
+    if node is not None:
+        return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C)
+    dist_from_center = dict.fromkeys(G, len(G))
+    for u in b_C:
+        for v, dist in nx.shortest_path_length(G, u).items():
+            if dist < dist_from_center[v]:
+                dist_from_center[v] = dist
+    return b_T + max(dist_from_center.values())
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/chordal.py b/venv/lib/python3.10/site-packages/networkx/algorithms/chordal.py
new file mode 100644
index 0000000000000000000000000000000000000000..6bd3ccd2ea3eb3bbca170313dd6ec02c433d6a38
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/chordal.py
@@ -0,0 +1,442 @@
+"""
+Algorithms for chordal graphs.
+
+A graph is chordal if every cycle of length at least 4 has a chord
+(an edge joining two nodes not adjacent in the cycle).
+
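+For example, trees and complete graphs are chordal, while a cycle on
+four or more nodes is not::
+
+    >>> import networkx as nx
+    >>> nx.is_chordal(nx.cycle_graph(4))
+    False
+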
+https://en.wikipedia.org/wiki/Chordal_graph +""" +import sys + +import networkx as nx +from networkx.algorithms.components import connected_components +from networkx.utils import arbitrary_element, not_implemented_for + +__all__ = [ + "is_chordal", + "find_induced_nodes", + "chordal_graph_cliques", + "chordal_graph_treewidth", + "NetworkXTreewidthBoundExceeded", + "complete_to_chordal_graph", +] + + +class NetworkXTreewidthBoundExceeded(nx.NetworkXException): + """Exception raised when a treewidth bound has been provided and it has + been exceeded""" + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_chordal(G): + """Checks whether G is a chordal graph. + + A graph is chordal if every cycle of length at least 4 has a chord + (an edge joining two nodes not adjacent in the cycle). + + Parameters + ---------- + G : graph + A NetworkX graph. + + Returns + ------- + chordal : bool + True if G is a chordal graph and False otherwise. + + Raises + ------ + NetworkXNotImplemented + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... ] + >>> G = nx.Graph(e) + >>> nx.is_chordal(G) + True + + Notes + ----- + The routine tries to go through every node following maximum cardinality + search. It returns False when it finds that the separator for any node + is not a clique. Based on the algorithms in [1]_. + + Self loops are ignored. + + References + ---------- + .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms + to test chordality of graphs, test acyclicity of hypergraphs, and + selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984), + pp. 566–579. + """ + if len(G.nodes) <= 3: + return True + return len(_find_chordality_breaker(G)) == 0 + + +@nx._dispatchable +def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): + """Returns the set of induced nodes in the path from s to t. + + Parameters + ---------- + G : graph + A chordal NetworkX graph + s : node + Source node to look for induced nodes + t : node + Destination node to look for induced nodes + treewidth_bound: float + Maximum treewidth acceptable for the graph H. The search + for induced nodes will end as soon as the treewidth_bound is exceeded. + + Returns + ------- + induced_nodes : Set of nodes + The set of induced nodes in the path from s to t in G + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + If the input graph is an instance of one of these classes, a + :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> G = nx.Graph() + >>> G = nx.generators.classic.path_graph(10) + >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) + >>> sorted(induced_nodes) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + + Notes + ----- + G must be a chordal graph and (s,t) an edge that is not in G. + + If a treewidth_bound is provided, the search for induced nodes will end + as soon as the treewidth_bound is exceeded. + + The algorithm is inspired by Algorithm 4 in [1]_. + A formal definition of induced node can also be found on that reference. + + Self Loops are ignored + + References + ---------- + .. [1] Learning Bounded Treewidth Bayesian Networks. 
+ Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008. + http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf + """ + if not is_chordal(G): + raise nx.NetworkXError("Input graph is not chordal.") + + H = nx.Graph(G) + H.add_edge(s, t) + induced_nodes = set() + triplet = _find_chordality_breaker(H, s, treewidth_bound) + while triplet: + (u, v, w) = triplet + induced_nodes.update(triplet) + for n in triplet: + if n != s: + H.add_edge(s, n) + triplet = _find_chordality_breaker(H, s, treewidth_bound) + if induced_nodes: + # Add t and the second node in the induced path from s to t. + induced_nodes.add(t) + for u in G[s]: + if len(induced_nodes & set(G[u])) == 2: + induced_nodes.add(u) + break + return induced_nodes + + +@nx._dispatchable +def chordal_graph_cliques(G): + """Returns all maximal cliques of a chordal graph. + + The algorithm breaks the graph in connected components and performs a + maximum cardinality search in each component to get the cliques. + + Parameters + ---------- + G : graph + A NetworkX graph + + Yields + ------ + frozenset of nodes + Maximal cliques, each of which is a frozenset of + nodes in `G`. The order of cliques is arbitrary. + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] + >>> G = nx.Graph(e) + >>> G.add_node(9) + >>> cliques = [c for c in chordal_graph_cliques(G)] + >>> cliques[0] + frozenset({1, 2, 3}) + """ + for C in (G.subgraph(c).copy() for c in connected_components(G)): + if C.number_of_nodes() == 1: + if nx.number_of_selfloops(C) > 0: + raise nx.NetworkXError("Input graph is not chordal.") + yield frozenset(C.nodes()) + else: + unnumbered = set(C.nodes()) + v = arbitrary_element(C) + unnumbered.remove(v) + numbered = {v} + clique_wanna_be = {v} + while unnumbered: + v = _max_cardinality_node(C, unnumbered, numbered) + unnumbered.remove(v) + numbered.add(v) + new_clique_wanna_be = set(C.neighbors(v)) & numbered + sg = C.subgraph(clique_wanna_be) + if _is_complete_graph(sg): + new_clique_wanna_be.add(v) + if not new_clique_wanna_be >= clique_wanna_be: + yield frozenset(clique_wanna_be) + clique_wanna_be = new_clique_wanna_be + else: + raise nx.NetworkXError("Input graph is not chordal.") + yield frozenset(clique_wanna_be) + + +@nx._dispatchable +def chordal_graph_treewidth(G): + """Returns the treewidth of the chordal graph G. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + treewidth : int + The size of the largest clique in the graph minus one. + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] + >>> G = nx.Graph(e) + >>> G.add_node(9) + >>> nx.chordal_graph_treewidth(G) + 3 + + References + ---------- + .. 
[1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth + """ + if not is_chordal(G): + raise nx.NetworkXError("Input graph is not chordal.") + + max_clique = -1 + for clique in nx.chordal_graph_cliques(G): + max_clique = max(max_clique, len(clique)) + return max_clique - 1 + + +def _is_complete_graph(G): + """Returns True if G is a complete graph.""" + if nx.number_of_selfloops(G) > 0: + raise nx.NetworkXError("Self loop found in _is_complete_graph()") + n = G.number_of_nodes() + if n < 2: + return True + e = G.number_of_edges() + max_edges = (n * (n - 1)) / 2 + return e == max_edges + + +def _find_missing_edge(G): + """Given a non-complete graph G, returns a missing edge.""" + nodes = set(G) + for u in G: + missing = nodes - set(list(G[u].keys()) + [u]) + if missing: + return (u, missing.pop()) + + +def _max_cardinality_node(G, choices, wanna_connect): + """Returns a the node in choices that has more connections in G + to nodes in wanna_connect. + """ + max_number = -1 + for x in choices: + number = len([y for y in G[x] if y in wanna_connect]) + if number > max_number: + max_number = number + max_cardinality_node = x + return max_cardinality_node + + +def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): + """Given a graph G, starts a max cardinality search + (starting from s if s is given and from an arbitrary node otherwise) + trying to find a non-chordal cycle. + + If it does find one, it returns (u,v,w) where u,v,w are the three + nodes that together with s are involved in the cycle. + + It ignores any self loops. + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") + unnumbered = set(G) + if s is None: + s = arbitrary_element(G) + unnumbered.remove(s) + numbered = {s} + current_treewidth = -1 + while unnumbered: # and current_treewidth <= treewidth_bound: + v = _max_cardinality_node(G, unnumbered, numbered) + unnumbered.remove(v) + numbered.add(v) + clique_wanna_be = set(G[v]) & numbered + sg = G.subgraph(clique_wanna_be) + if _is_complete_graph(sg): + # The graph seems to be chordal by now. We update the treewidth + current_treewidth = max(current_treewidth, len(clique_wanna_be)) + if current_treewidth > treewidth_bound: + raise nx.NetworkXTreewidthBoundExceeded( + f"treewidth_bound exceeded: {current_treewidth}" + ) + else: + # sg is not a clique, + # look for an edge that is not included in sg + (u, w) = _find_missing_edge(sg) + return (u, v, w) + return () + + +@not_implemented_for("directed") +@nx._dispatchable(returns_graph=True) +def complete_to_chordal_graph(G): + """Return a copy of G completed to a chordal graph + + Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is + called chordal if for each cycle with length bigger than 3, there exist + two non-adjacent nodes connected by an edge (called a chord). + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + H : NetworkX graph + The chordal enhancement of G + alpha : Dictionary + The elimination ordering of nodes of G + + Notes + ----- + There are different approaches to calculate the chordal + enhancement of a graph. The algorithm used here is called + MCS-M and gives at least minimal (local) triangulation of graph. Note + that this triangulation is not necessarily a global minimum. + + https://en.wikipedia.org/wiki/Chordal_graph + + References + ---------- + .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004) + Maximum Cardinality Search for Computing Minimal Triangulations of + Graphs. 
Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3. + + Examples + -------- + >>> from networkx.algorithms.chordal import complete_to_chordal_graph + >>> G = nx.wheel_graph(10) + >>> H, alpha = complete_to_chordal_graph(G) + """ + H = G.copy() + alpha = {node: 0 for node in H} + if nx.is_chordal(H): + return H, alpha + chords = set() + weight = {node: 0 for node in H.nodes()} + unnumbered_nodes = list(H.nodes()) + for i in range(len(H.nodes()), 0, -1): + # get the node in unnumbered_nodes with the maximum weight + z = max(unnumbered_nodes, key=lambda node: weight[node]) + unnumbered_nodes.remove(z) + alpha[z] = i + update_nodes = [] + for y in unnumbered_nodes: + if G.has_edge(y, z): + update_nodes.append(y) + else: + # y_weight will be bigger than node weights between y and z + y_weight = weight[y] + lower_nodes = [ + node for node in unnumbered_nodes if weight[node] < y_weight + ] + if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z): + update_nodes.append(y) + chords.add((z, y)) + # during calculation of paths the weights should not be updated + for node in update_nodes: + weight[node] += 1 + H.add_edges_from(chords) + return H, alpha diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py b/venv/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py new file mode 100644 index 0000000000000000000000000000000000000000..07316dc3ae235ed165be6b73994177fb8920510d --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py @@ -0,0 +1,162 @@ +""" +Communicability. +""" +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["communicability", "communicability_exp"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def communicability(G): + r"""Returns communicability between all pairs of nodes in G. + + The communicability between pairs of nodes in G is the sum of + walks of different lengths starting at node u and ending at node v. + + Parameters + ---------- + G: graph + + Returns + ------- + comm: dictionary of dictionaries + Dictionary of dictionaries keyed by nodes with communicability + as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + See Also + -------- + communicability_exp: + Communicability between all pairs of nodes in G using spectral + decomposition. + communicability_betweenness_centrality: + Communicability betweenness centrality for each node in G. + + Notes + ----- + This algorithm uses a spectral decomposition of the adjacency matrix. + Let G=(V,E) be a simple undirected graph. Using the connection between + the powers of the adjacency matrix and the number of walks in the graph, + the communicability between nodes `u` and `v` based on the graph spectrum + is [1]_ + + .. math:: + C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}}, + + where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal + eigenvector of the adjacency matrix associated with the eigenvalue + `\lambda_{j}`. + + References + ---------- + .. [1] Ernesto Estrada, Naomichi Hatano, + "Communicability in complex networks", + Phys. Rev. E 77, 036111 (2008). 
+ https://arxiv.org/abs/0707.0756 + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) + >>> c = nx.communicability(G) + """ + import numpy as np + + nodelist = list(G) # ordering of nodes in matrix + A = nx.to_numpy_array(G, nodelist) + # convert to 0-1 matrix + A[A != 0.0] = 1 + w, vec = np.linalg.eigh(A) + expw = np.exp(w) + mapping = dict(zip(nodelist, range(len(nodelist)))) + c = {} + # computing communicabilities + for u in G: + c[u] = {} + for v in G: + s = 0 + p = mapping[u] + q = mapping[v] + for j in range(len(nodelist)): + s += vec[:, j][p] * vec[:, j][q] * expw[j] + c[u][v] = float(s) + return c + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def communicability_exp(G): + r"""Returns communicability between all pairs of nodes in G. + + Communicability between pair of node (u,v) of node in G is the sum of + walks of different lengths starting at node u and ending at node v. + + Parameters + ---------- + G: graph + + Returns + ------- + comm: dictionary of dictionaries + Dictionary of dictionaries keyed by nodes with communicability + as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + See Also + -------- + communicability: + Communicability between pairs of nodes in G. + communicability_betweenness_centrality: + Communicability betweenness centrality for each node in G. + + Notes + ----- + This algorithm uses matrix exponentiation of the adjacency matrix. + + Let G=(V,E) be a simple undirected graph. Using the connection between + the powers of the adjacency matrix and the number of walks in the graph, + the communicability between nodes u and v is [1]_, + + .. math:: + C(u,v) = (e^A)_{uv}, + + where `A` is the adjacency matrix of G. + + References + ---------- + .. [1] Ernesto Estrada, Naomichi Hatano, + "Communicability in complex networks", + Phys. Rev. E 77, 036111 (2008). + https://arxiv.org/abs/0707.0756 + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) + >>> c = nx.communicability_exp(G) + """ + import scipy as sp + + nodelist = list(G) # ordering of nodes in matrix + A = nx.to_numpy_array(G, nodelist) + # convert to 0-1 matrix + A[A != 0.0] = 1 + # communicability matrix + expA = sp.linalg.expm(A) + mapping = dict(zip(nodelist, range(len(nodelist)))) + c = {} + for u in G: + c[u] = {} + for v in G: + c[u][v] = float(expA[mapping[u], mapping[v]]) + return c diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/covering.py b/venv/lib/python3.10/site-packages/networkx/algorithms/covering.py new file mode 100644 index 0000000000000000000000000000000000000000..bed482bc4b346eb9ff1adb32406daa0b3dba2579 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/covering.py @@ -0,0 +1,142 @@ +""" Functions related to graph covers.""" + +from functools import partial +from itertools import chain + +import networkx as nx +from networkx.utils import arbitrary_element, not_implemented_for + +__all__ = ["min_edge_cover", "is_edge_cover"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def min_edge_cover(G, matching_algorithm=None): + """Returns the min cardinality edge cover of the graph as a set of edges. + + A smallest edge cover can be found in polynomial time by finding + a maximum matching and extending it greedily so that all nodes + are covered. This function follows that process. 
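+    (In a star graph, for instance, a maximum matching covers only the
+    center and one leaf, and the greedy extension then adds one edge for
+    each remaining leaf.)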
A maximum matching + algorithm can be specified for the first step of the algorithm. + The resulting set may return a set with one 2-tuple for each edge, + (the usual case) or with both 2-tuples `(u, v)` and `(v, u)` for + each edge. The latter is only done when a bipartite matching algorithm + is specified as `matching_algorithm`. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + matching_algorithm : function + A function that returns a maximum cardinality matching for `G`. + The function must take one input, the graph `G`, and return + either a set of edges (with only one direction for the pair of nodes) + or a dictionary mapping each node to its mate. If not specified, + :func:`~networkx.algorithms.matching.max_weight_matching` is used. + Common bipartite matching functions include + :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching` + or + :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`. + + Returns + ------- + min_cover : set + + A set of the edges in a minimum edge cover in the form of tuples. + It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)` + for each edge. If a bipartite method is used to compute the matching, + the returned set contains both the 2-tuples `(u, v)` and `(v, u)` + for each edge of a minimum edge cover. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> sorted(nx.min_edge_cover(G)) + [(2, 1), (3, 0)] + + Notes + ----- + An edge cover of a graph is a set of edges such that every node of + the graph is incident to at least one edge of the set. + The minimum edge cover is an edge covering of smallest cardinality. + + Due to its implementation, the worst-case running time of this algorithm + is bounded by the worst-case running time of the function + ``matching_algorithm``. + + Minimum edge cover for `G` can also be found using the `min_edge_covering` + function in :mod:`networkx.algorithms.bipartite.covering` which is + simply this function with a default matching algorithm of + :func:`~networkx.algorithms.bipartite.matching.hopcraft_karp_matching` + """ + if len(G) == 0: + return set() + if nx.number_of_isolates(G) > 0: + # ``min_cover`` does not exist as there is an isolated node + raise nx.NetworkXException( + "Graph has a node with no edge incident on it, so no edge cover exists." + ) + if matching_algorithm is None: + matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True) + maximum_matching = matching_algorithm(G) + # ``min_cover`` is superset of ``maximum_matching`` + try: + # bipartite matching algs return dict so convert if needed + min_cover = set(maximum_matching.items()) + bipartite_cover = True + except AttributeError: + min_cover = maximum_matching + bipartite_cover = False + # iterate for uncovered nodes + uncovered_nodes = set(G) - {v for u, v in min_cover} - {u for u, v in min_cover} + for v in uncovered_nodes: + # Since `v` is uncovered, each edge incident to `v` will join it + # with a covered node (otherwise, if there were an edge joining + # uncovered nodes `u` and `v`, the maximum matching algorithm + # would have found it), so we can choose an arbitrary edge + # incident to `v`. (This applies only in a simple graph, not a + # multigraph.) 
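+        # Each uncovered node thus contributes exactly one additional
+        # edge to the cover.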
+ u = arbitrary_element(G[v]) + min_cover.add((u, v)) + if bipartite_cover: + min_cover.add((v, u)) + return min_cover + + +@not_implemented_for("directed") +@nx._dispatchable +def is_edge_cover(G, cover): + """Decides whether a set of edges is a valid edge cover of the graph. + + Given a set of edges, whether it is an edge covering can + be decided if we just check whether all nodes of the graph + has an edge from the set, incident on it. + + Parameters + ---------- + G : NetworkX graph + An undirected bipartite graph. + + cover : set + Set of edges to be checked. + + Returns + ------- + bool + Whether the set of edges is a valid edge cover of the graph. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> cover = {(2, 1), (3, 0)} + >>> nx.is_edge_cover(G, cover) + True + + Notes + ----- + An edge cover of a graph is a set of edges such that every node of + the graph is incident to at least one edge of the set. + """ + return set(G) <= set(chain.from_iterable(cover)) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py b/venv/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py new file mode 100644 index 0000000000000000000000000000000000000000..20c1086d6d2277cf8f2987a51d0441d6b7374d05 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py @@ -0,0 +1,951 @@ +"""Graph diameter, radius, eccentricity and other properties.""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "eccentricity", + "diameter", + "radius", + "periphery", + "center", + "barycenter", + "resistance_distance", + "kemeny_constant", + "effective_graph_resistance", +] + + +def _extrema_bounding(G, compute="diameter", weight=None): + """Compute requested extreme distance metric of undirected graph G + + Computation is based on smart lower and upper bounds, and in practice + linear in the number of nodes, rather than quadratic (except for some + border cases such as complete graphs or circle shaped graphs). + + Parameters + ---------- + G : NetworkX graph + An undirected graph + + compute : string denoting the requesting metric + "diameter" for the maximal eccentricity value, + "radius" for the minimal eccentricity value, + "periphery" for the set of nodes with eccentricity equal to the diameter, + "center" for the set of nodes with eccentricity equal to the radius, + "eccentricities" for the maximum distance from each node to all other nodes in G + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. 
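+
+    This helper backs :func:`diameter`, :func:`radius`, :func:`periphery`
+    and :func:`center` whenever those functions are called with
+    ``usebounds=True`` on an undirected graph.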
+ + Returns + ------- + value : value of the requested metric + int for "diameter" and "radius" or + list of nodes for "center" and "periphery" or + dictionary of eccentricity values keyed by node for "eccentricities" + + Raises + ------ + NetworkXError + If the graph consists of multiple components + ValueError + If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities". + + Notes + ----- + This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_. + + References + ---------- + .. [1] F. W. Takes, W. A. Kosters, + "Determining the diameter of small world networks." + Proceedings of the 20th ACM international conference on Information and knowledge management, 2011 + https://dl.acm.org/doi/abs/10.1145/2063576.2063748 + .. [2] F. W. Takes, W. A. Kosters, + "Computing the Eccentricity Distribution of Large Graphs." + Algorithms, 2013 + https://www.mdpi.com/1999-4893/6/1/100 + .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes, + "Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. " + Theoretical Computer Science, 2015 + https://www.sciencedirect.com/science/article/pii/S0304397515001644 + """ + # init variables + degrees = dict(G.degree()) # start with the highest degree node + minlowernode = max(degrees, key=degrees.get) + N = len(degrees) # number of nodes + # alternate between smallest lower and largest upper bound + high = False + # status variables + ecc_lower = dict.fromkeys(G, 0) + ecc_upper = dict.fromkeys(G, N) + candidates = set(G) + + # (re)set bound extremes + minlower = N + maxlower = 0 + minupper = N + maxupper = 0 + + # repeat the following until there are no more candidates + while candidates: + if high: + current = maxuppernode # select node with largest upper bound + else: + current = minlowernode # select node with smallest lower bound + high = not high + + # get distances from/to current node and derive eccentricity + dist = nx.shortest_path_length(G, source=current, weight=weight) + + if len(dist) != N: + msg = "Cannot compute metric because graph is not connected." 
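+            # fewer than N nodes were reached, so some node is
+            # unreachable from `current` and the graph is disconnected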
+ raise nx.NetworkXError(msg) + current_ecc = max(dist.values()) + + # print status update + # print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" + # + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " + # + str(current_ecc)) + # print(ecc_upper) + + # (re)set bound extremes + maxuppernode = None + minlowernode = None + + # update node bounds + for i in candidates: + # update eccentricity bounds + d = dist[i] + ecc_lower[i] = low = max(ecc_lower[i], max(d, (current_ecc - d))) + ecc_upper[i] = upp = min(ecc_upper[i], current_ecc + d) + + # update min/max values of lower and upper bounds + minlower = min(ecc_lower[i], minlower) + maxlower = max(ecc_lower[i], maxlower) + minupper = min(ecc_upper[i], minupper) + maxupper = max(ecc_upper[i], maxupper) + + # update candidate set + if compute == "diameter": + ruled_out = { + i + for i in candidates + if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper + } + elif compute == "radius": + ruled_out = { + i + for i in candidates + if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower + } + elif compute == "periphery": + ruled_out = { + i + for i in candidates + if ecc_upper[i] < maxlower + and (maxlower == maxupper or ecc_lower[i] > maxupper) + } + elif compute == "center": + ruled_out = { + i + for i in candidates + if ecc_lower[i] > minupper + and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower) + } + elif compute == "eccentricities": + ruled_out = set() + else: + msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'" + raise ValueError(msg) + + ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i]) + candidates -= ruled_out + + # for i in ruled_out: + # print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) + # print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower)) + # print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower + # and 2 * ecc_lower[4] >= maxupper)) + + # updating maxuppernode and minlowernode for selection in next round + for i in candidates: + if ( + minlowernode is None + or ( + ecc_lower[i] == ecc_lower[minlowernode] + and degrees[i] > degrees[minlowernode] + ) + or (ecc_lower[i] < ecc_lower[minlowernode]) + ): + minlowernode = i + + if ( + maxuppernode is None + or ( + ecc_upper[i] == ecc_upper[maxuppernode] + and degrees[i] > degrees[maxuppernode] + ) + or (ecc_upper[i] > ecc_upper[maxuppernode]) + ): + maxuppernode = i + + # print status update + # print (" min=" + str(minlower) + "/" + str(minupper) + + # " max=" + str(maxlower) + "/" + str(maxupper) + + # " candidates: " + str(len(candidates))) + # print("cand:",candidates) + # print("ecc_l",ecc_lower) + # print("ecc_u",ecc_upper) + # wait = input("press Enter to continue") + + # return the correct value of the requested metric + if compute == "diameter": + return maxlower + if compute == "radius": + return minupper + if compute == "periphery": + p = [v for v in G if ecc_lower[v] == maxlower] + return p + if compute == "center": + c = [v for v in G if ecc_upper[v] == minupper] + return c + if compute == "eccentricities": + return ecc_lower + return None + + +@nx._dispatchable(edge_attrs="weight") +def eccentricity(G, v=None, sp=None, weight=None): + """Returns the eccentricity of nodes in G. 
+ + The eccentricity of a node v is the maximum distance from v to + all other nodes in G. + + Parameters + ---------- + G : NetworkX graph + A graph + + v : node, optional + Return value of specified node + + sp : dict of dicts, optional + All pairs shortest path lengths as a dictionary of dictionaries + + weight : string, function, or None (default=None) + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + ecc : dictionary + A dictionary of eccentricity values keyed by node. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> dict(nx.eccentricity(G)) + {1: 2, 2: 3, 3: 2, 4: 2, 5: 3} + + >>> dict(nx.eccentricity(G, v=[1, 5])) # This returns the eccentricity of node 1 & 5 + {1: 2, 5: 3} + + """ + # if v is None: # none, use entire graph + # nodes=G.nodes() + # elif v in G: # is v a single node + # nodes=[v] + # else: # assume v is a container of nodes + # nodes=v + order = G.order() + e = {} + for n in G.nbunch_iter(v): + if sp is None: + length = nx.shortest_path_length(G, source=n, weight=weight) + + L = len(length) + else: + try: + length = sp[n] + L = len(length) + except TypeError as err: + raise nx.NetworkXError('Format of "sp" is invalid.') from err + if L != order: + if G.is_directed(): + msg = ( + "Found infinite path length because the digraph is not" + " strongly connected" + ) + else: + msg = "Found infinite path length because the graph is not" " connected" + raise nx.NetworkXError(msg) + + e[n] = max(length.values()) + + if v in G: + return e[v] # return single value + return e + + +@nx._dispatchable(edge_attrs="weight") +def diameter(G, e=None, usebounds=False, weight=None): + """Returns the diameter of the graph G. + + The diameter is the maximum eccentricity. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. 
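+
+    usebounds : bool, optional (default=False)
+        If True, and if `e` is None and `G` is undirected, compute the
+        diameter with a bound-based extrema search instead of computing
+        the eccentricity of every node.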
+ + Returns + ------- + d : integer + Diameter of graph + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.diameter(G) + 3 + + See Also + -------- + eccentricity + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="diameter", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + return max(e.values()) + + +@nx._dispatchable(edge_attrs="weight") +def periphery(G, e=None, usebounds=False, weight=None): + """Returns the periphery of the graph G. + + The periphery is the set of nodes with eccentricity equal to the diameter. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + p : list + List of nodes in periphery + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.periphery(G) + [2, 5] + + See Also + -------- + barycenter + center + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="periphery", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + diameter = max(e.values()) + p = [v for v in e if e[v] == diameter] + return p + + +@nx._dispatchable(edge_attrs="weight") +def radius(G, e=None, usebounds=False, weight=None): + """Returns the radius of the graph G. + + The radius is the minimum eccentricity. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. 
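+
+    usebounds : bool, optional (default=False)
+        If True, and if `e` is None and `G` is undirected, compute the
+        radius with a bound-based extrema search instead of computing
+        the eccentricity of every node.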
+ + Returns + ------- + r : integer + Radius of graph + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.radius(G) + 2 + + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="radius", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + return min(e.values()) + + +@nx._dispatchable(edge_attrs="weight") +def center(G, e=None, usebounds=False, weight=None): + """Returns the center of the graph G. + + The center is the set of nodes with eccentricity equal to radius. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + c : list + List of nodes in center + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> list(nx.center(G)) + [1, 3, 4] + + See Also + -------- + barycenter + periphery + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="center", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + radius = min(e.values()) + p = [v for v in e if e[v] == radius] + return p + + +@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2}) +def barycenter(G, weight=None, attr=None, sp=None): + r"""Calculate barycenter of a connected graph, optionally with edge weights. + + The :dfn:`barycenter` a + :func:`connected ` graph + :math:`G` is the subgraph induced by the set of its nodes :math:`v` + minimizing the objective function + + .. math:: + + \sum_{u \in V(G)} d_G(u, v), + + where :math:`d_G` is the (possibly weighted) :func:`path length + `. + The barycenter is also called the :dfn:`median`. See [West01]_, p. 78. + + Parameters + ---------- + G : :class:`networkx.Graph` + The connected graph :math:`G`. + weight : :class:`str`, optional + Passed through to + :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`. + attr : :class:`str`, optional + If given, write the value of the objective function to each node's + `attr` attribute. Otherwise do not store the value. + sp : dict of dicts, optional + All pairs shortest path lengths as a dictionary of dictionaries + + Returns + ------- + list + Nodes of `G` that induce the barycenter of `G`. + + Raises + ------ + NetworkXNoPath + If `G` is disconnected. `G` may appear disconnected to + :func:`barycenter` if `sp` is given but is missing shortest path + lengths for any pairs. + ValueError + If `sp` and `weight` are both given. 
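+
+    Notes
+    -----
+    For the path graph on five nodes the barycenter is the middle node:
+    its distance sum is 2 + 1 + 0 + 1 + 2 = 6, versus 10 for an endpoint.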
+ + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.barycenter(G) + [1, 3, 4] + + See Also + -------- + center + periphery + """ + if sp is None: + sp = nx.shortest_path_length(G, weight=weight) + else: + sp = sp.items() + if weight is not None: + raise ValueError("Cannot use both sp, weight arguments together") + smallest, barycenter_vertices, n = float("inf"), [], len(G) + for v, dists in sp: + if len(dists) < n: + raise nx.NetworkXNoPath( + f"Input graph {G} is disconnected, so every induced subgraph " + "has infinite barycentricity." + ) + barycentricity = sum(dists.values()) + if attr is not None: + G.nodes[v][attr] = barycentricity + if barycentricity < smallest: + smallest = barycentricity + barycenter_vertices = [v] + elif barycentricity == smallest: + barycenter_vertices.append(v) + if attr is not None: + nx._clear_cache(G) + return barycenter_vertices + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True): + """Returns the resistance distance between pairs of nodes in graph G. + + The resistance distance between two nodes of a graph is akin to treating + the graph as a grid of resistors with a resistance equal to the provided + weight [1]_, [2]_. + + If weight is not provided, then a weight of 1 is used for all edges. + + If two nodes are the same, the resistance distance is zero. + + Parameters + ---------- + G : NetworkX graph + A graph + + nodeA : node or None, optional (default=None) + A node within graph G. + If None, compute resistance distance using all nodes as source nodes. + + nodeB : node or None, optional (default=None) + A node within graph G. + If None, compute resistance distance using all nodes as target nodes. + + weight : string or None, optional (default=None) + The edge data key used to compute the resistance distance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero. + + Returns + ------- + rd : dict or float + If `nodeA` and `nodeB` are given, resistance distance between `nodeA` + and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a + dictionary of nodes with resistance distances as the value. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph. + + NetworkXError + If `G` is not connected, or contains no nodes, + or `nodeA` is not in `G` or `nodeB` is not in `G`. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> round(nx.resistance_distance(G, 1, 3), 10) + 0.625 + + Notes + ----- + The implementation is based on Theorem A in [2]_. Self-loops are ignored. + Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Resistance distance." + https://en.wikipedia.org/wiki/Resistance_distance + .. [2] D. J. Klein and M. Randic. + Resistance distance. + J. of Math. Chem. 12:81-95, 1993. 
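+
+    In terms of the pseudoinverse ``L+`` of the graph Laplacian, the
+    implementation computes
+    ``R(u, v) = L+[u, u] + L+[v, v] - L+[u, v] - L+[v, u]``.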
+ """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + if not nx.is_connected(G): + raise nx.NetworkXError("Graph G must be strongly connected.") + if nodeA is not None and nodeA not in G: + raise nx.NetworkXError("Node A is not in graph G.") + if nodeB is not None and nodeB not in G: + raise nx.NetworkXError("Node B is not in graph G.") + + G = G.copy() + node_list = list(G) + + # Invert weights + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Compute resistance distance using the Pseudo-inverse of the Laplacian + # Self-loops are ignored + L = nx.laplacian_matrix(G, weight=weight).todense() + Linv = np.linalg.pinv(L, hermitian=True) + + # Return relevant distances + if nodeA is not None and nodeB is not None: + i = node_list.index(nodeA) + j = node_list.index(nodeB) + return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + + elif nodeA is not None: + i = node_list.index(nodeA) + d = {} + for n in G: + j = node_list.index(n) + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + return d + + elif nodeB is not None: + j = node_list.index(nodeB) + d = {} + for n in G: + i = node_list.index(n) + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + return d + + else: + d = {} + for n in G: + i = node_list.index(n) + d[n] = {} + for n2 in G: + j = node_list.index(n2) + d[n][n2] = ( + Linv.item(i, i) + + Linv.item(j, j) + - Linv.item(i, j) + - Linv.item(j, i) + ) + return d + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def effective_graph_resistance(G, weight=None, invert_weight=True): + """Returns the Effective graph resistance of G. + + Also known as the Kirchhoff index. + + The effective graph resistance is defined as the sum + of the resistance distance of every node pair in G [1]_. + + If weight is not provided, then a weight of 1 is used for all edges. + + The effective graph resistance of a disconnected graph is infinite. + + Parameters + ---------- + G : NetworkX graph + A graph + + weight : string or None, optional (default=None) + The edge data key used to compute the effective graph resistance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero. + + Returns + ------- + RG : float + The effective graph resistance of `G`. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph. + + NetworkXError + If `G` does not contain any nodes. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> round(nx.effective_graph_resistance(G), 10) + 10.25 + + Notes + ----- + The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored. + Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights. + + References + ---------- + .. [1] Wolfram + "Kirchhoff Index." + https://mathworld.wolfram.com/KirchhoffIndex.html + .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij. + Effective graph resistance. + Lin. Alg. Appl. 435:2491-2506, 2011. 
+ """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + + # Disconnected graphs have infinite Effective graph resistance + if not nx.is_connected(G): + return float("inf") + + # Invert weights + G = G.copy() + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Get Laplacian eigenvalues + mu = np.sort(nx.laplacian_spectrum(G, weight=weight)) + + # Compute Effective graph resistance based on spectrum of the Laplacian + # Self-loops are ignored + return float(np.sum(1 / mu[1:]) * G.number_of_nodes()) + + +@nx.utils.not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def kemeny_constant(G, *, weight=None): + """Returns the Kemeny constant of the given graph. + + The *Kemeny constant* (or Kemeny's constant) of a graph `G` + can be computed by regarding the graph as a Markov chain. + The Kemeny constant is then the expected number of time steps + to transition from a starting state i to a random destination state + sampled from the Markov chain's stationary distribution. + The Kemeny constant is independent of the chosen initial state [1]_. + + The Kemeny constant measures the time needed for spreading + across a graph. Low values indicate a closely connected graph + whereas high values indicate a spread-out graph. + + If weight is not provided, then a weight of 1 is used for all edges. + + Since `G` represents a Markov chain, the weights must be positive. + + Parameters + ---------- + G : NetworkX graph + + weight : string or None, optional (default=None) + The edge data key used to compute the Kemeny constant. + If None, then each edge has weight 1. + + Returns + ------- + float + The Kemeny constant of the graph `G`. + + Raises + ------ + NetworkXNotImplemented + If the graph `G` is directed. + + NetworkXError + If the graph `G` is not connected, or contains no nodes, + or has edges with negative weights. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.kemeny_constant(G), 10) + 3.2 + + Notes + ----- + The implementation is based on equation (3.3) in [2]_. + Self-loops are allowed and indicate a Markov chain where + the state can remain the same. Multi-edges are contracted + in one edge with weight equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kemeny's constant." + https://en.wikipedia.org/wiki/Kemeny%27s_constant + .. [2] Lovász L. + Random walks on graphs: A survey. + Paul Erdös is Eighty, vol. 2, Bolyai Society, + Mathematical Studies, Keszthely, Hungary (1993), pp. 
1-46 + """ + import numpy as np + import scipy as sp + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + if not nx.is_connected(G): + raise nx.NetworkXError("Graph G must be connected.") + if nx.is_negatively_weighted(G, weight=weight): + raise nx.NetworkXError("The weights of graph G must be nonnegative.") + + # Compute matrix H = D^-1/2 A D^-1/2 + A = nx.adjacency_matrix(G, weight=weight) + n, m = A.shape + diags = A.sum(axis=1) + with np.errstate(divide="ignore"): + diags_sqrt = 1.0 / np.sqrt(diags) + diags_sqrt[np.isinf(diags_sqrt)] = 0 + DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr")) + H = DH @ (A @ DH) + + # Compute eigenvalues of H + eig = np.sort(sp.linalg.eigvalsh(H.todense())) + + # Compute the Kemeny constant + return float(np.sum(1 / (1 - eig[:-1]))) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py b/venv/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py new file mode 100644 index 0000000000000000000000000000000000000000..27b4d0216e427a03f6cc0b90d15f4debb2d52b56 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py @@ -0,0 +1,238 @@ +""" +======================= +Distance-regular graphs +======================= +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +from .distance_measures import diameter + +__all__ = [ + "is_distance_regular", + "is_strongly_regular", + "intersection_array", + "global_parameters", +] + + +@nx._dispatchable +def is_distance_regular(G): + """Returns True if the graph is distance regular, False otherwise. + + A connected graph G is distance-regular if for any nodes x,y + and any integers i,j=0,1,...,d (where d is the graph + diameter), the number of vertices at distance i from x and + distance j from y depends only on i,j and the graph distance + between x and y, independently of the choice of x and y. + + Parameters + ---------- + G: Networkx graph (undirected) + + Returns + ------- + bool + True if the graph is Distance Regular, False otherwise + + Examples + -------- + >>> G = nx.hypercube_graph(6) + >>> nx.is_distance_regular(G) + True + + See Also + -------- + intersection_array, global_parameters + + Notes + ----- + For undirected and simple graphs only + + References + ---------- + .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A. + Distance-Regular Graphs. New York: Springer-Verlag, 1989. + .. [2] Weisstein, Eric W. "Distance-Regular Graph." + http://mathworld.wolfram.com/Distance-RegularGraph.html + + """ + try: + intersection_array(G) + return True + except nx.NetworkXError: + return False + + +def global_parameters(b, c): + """Returns global parameters for a given intersection array. + + Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d + such that for any 2 vertices x,y in G at a distance i=d(x,y), there + are exactly c_i neighbors of y at a distance of i-1 from x and b_i + neighbors of y at a distance of i+1 from x. + + Thus, a distance regular graph has the global parameters, + [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the + intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] + where a_i+b_i+c_i=k , k= degree of every vertex. + + Parameters + ---------- + b : list + + c : list + + Returns + ------- + iterable + An iterable over three tuples. 
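Stepping back to ``kemeny_constant`` above: the matrix H = D^-1/2 A D^-1/2 it builds is similar to the random-walk transition matrix P = D^-1 A (conjugate by D^1/2), so the two share a spectrum, and K = sum(1 / (1 - lambda)) over the non-unit eigenvalues can be cross-checked via P directly. A dense sketch, assuming the K5 example from the docstring::

    import networkx as nx
    import numpy as np

    G = nx.complete_graph(5)
    A = nx.adjacency_matrix(G).toarray().astype(float)
    P = A / A.sum(axis=1, keepdims=True)  # row-stochastic transition matrix
    eig = np.sort(np.linalg.eigvals(P).real)
    # Drop the Perron eigenvalue 1 and sum over the rest
    print(round(float(np.sum(1 / (1 - eig[:-1]))), 10))  # 3.2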
+ + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> b, c = nx.intersection_array(G) + >>> list(nx.global_parameters(b, c)) + [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)] + + References + ---------- + .. [1] Weisstein, Eric W. "Global Parameters." + From MathWorld--A Wolfram Web Resource. + http://mathworld.wolfram.com/GlobalParameters.html + + See Also + -------- + intersection_array + """ + return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def intersection_array(G): + """Returns the intersection array of a distance-regular graph. + + Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d + such that for any 2 vertices x,y in G at a distance i=d(x,y), there + are exactly c_i neighbors of y at a distance of i-1 from x and b_i + neighbors of y at a distance of i+1 from x. + + A distance regular graph's intersection array is given by, + [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] + + Parameters + ---------- + G: Networkx graph (undirected) + + Returns + ------- + b,c: tuple of lists + + Examples + -------- + >>> G = nx.icosahedral_graph() + >>> nx.intersection_array(G) + ([5, 2, 1], [1, 2, 5]) + + References + ---------- + .. [1] Weisstein, Eric W. "Intersection Array." + From MathWorld--A Wolfram Web Resource. + http://mathworld.wolfram.com/IntersectionArray.html + + See Also + -------- + global_parameters + """ + # test for regular graph (all degrees must be equal) + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") + degree = iter(G.degree()) + (_, k) = next(degree) + for _, knext in degree: + if knext != k: + raise nx.NetworkXError("Graph is not distance regular.") + k = knext + path_length = dict(nx.all_pairs_shortest_path_length(G)) + diameter = max(max(path_length[n].values()) for n in path_length) + bint = {} # 'b' intersection array + cint = {} # 'c' intersection array + for u in G: + for v in G: + try: + i = path_length[u][v] + except KeyError as err: # graph must be connected + raise nx.NetworkXError("Graph is not distance regular.") from err + # number of neighbors of v at a distance of i-1 from u + c = len([n for n in G[v] if path_length[n][u] == i - 1]) + # number of neighbors of v at a distance of i+1 from u + b = len([n for n in G[v] if path_length[n][u] == i + 1]) + # b,c are independent of u and v + if cint.get(i, c) != c or bint.get(i, b) != b: + raise nx.NetworkXError("Graph is not distance regular") + bint[i] = b + cint[i] = c + return ( + [bint.get(j, 0) for j in range(diameter)], + [cint.get(j + 1, 0) for j in range(diameter)], + ) + + +# TODO There is a definition for directed strongly regular graphs. +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_strongly_regular(G): + """Returns True if and only if the given graph is strongly + regular. + + An undirected graph is *strongly regular* if + + * it is regular, + * each pair of adjacent vertices has the same number of neighbors in + common, + * each pair of nonadjacent vertices has the same number of neighbors + in common. + + Each strongly regular graph is a distance-regular graph. + Conversely, if a distance-regular graph has diameter two, then it is + a strongly regular graph. For more information on distance-regular + graphs, see :func:`is_distance_regular`. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + Returns + ------- + bool + Whether `G` is strongly regular. 
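As a quick check of ``intersection_array`` and ``global_parameters`` defined above, each triple (c_i, a_i, b_i) sums to the common degree k = b_0. A sketch::

    import networkx as nx

    G = nx.icosahedral_graph()  # distance-regular and 5-regular
    b, c = nx.intersection_array(G)  # ([5, 2, 1], [1, 2, 5])
    k = b[0]
    assert all(c_i + a_i + b_i == k for c_i, a_i, b_i in nx.global_parameters(b, c))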
+ + Examples + -------- + + The cycle graph on five vertices is strongly regular. It is + two-regular, each pair of adjacent vertices has no shared neighbors, + and each pair of nonadjacent vertices has one shared neighbor:: + + >>> G = nx.cycle_graph(5) + >>> nx.is_strongly_regular(G) + True + + """ + # Here is an alternate implementation based directly on the + # definition of strongly regular graphs: + # + # return (all_equal(G.degree().values()) + # and all_equal(len(common_neighbors(G, u, v)) + # for u, v in G.edges()) + # and all_equal(len(common_neighbors(G, u, v)) + # for u, v in non_edges(G))) + # + # We instead use the fact that a distance-regular graph of diameter + # two is strongly regular. + return is_distance_regular(G) and diameter(G) == 2 diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py b/venv/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py new file mode 100644 index 0000000000000000000000000000000000000000..4bb01cb45687a92d56ec56c015751d6abdeaa632 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py @@ -0,0 +1,48 @@ +""" +Flow Hierarchy. +""" +import networkx as nx + +__all__ = ["flow_hierarchy"] + + +@nx._dispatchable(edge_attrs="weight") +def flow_hierarchy(G, weight=None): + """Returns the flow hierarchy of a directed network. + + Flow hierarchy is defined as the fraction of edges not participating + in cycles in a directed graph [1]_. + + Parameters + ---------- + G : DiGraph or MultiDiGraph + A directed graph + + weight : string, optional (default=None) + Attribute to use for edge weights. If None the weight defaults to 1. + + Returns + ------- + h : float + Flow hierarchy value + + Notes + ----- + The algorithm described in [1]_ computes the flow hierarchy through + exponentiation of the adjacency matrix. This function implements an + alternative approach that finds strongly connected components. + An edge is in a cycle if and only if it is in a strongly connected + component, which can be found in $O(m)$ time using Tarjan's algorithm. + + References + ---------- + .. [1] Luo, J.; Magee, C.L. (2011), + Detecting evolving patterns of self-organizing networks by flow + hierarchy measurement, Complexity, Volume 16 Issue 6 53-61. + DOI: 10.1002/cplx.20368 + http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf + """ + if not G.is_directed(): + raise nx.NetworkXError("G must be a digraph in flow_hierarchy") + scc = nx.strongly_connected_components(G) + return 1 - sum(G.subgraph(c).size(weight) for c in scc) / G.size(weight) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isolate.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isolate.py new file mode 100644 index 0000000000000000000000000000000000000000..23ac23875dbcdafacf82e6995ba5e1ef6f48df05 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isolate.py @@ -0,0 +1,107 @@ +""" +Functions for identifying isolate (degree zero) nodes. +""" +import networkx as nx + +__all__ = ["is_isolate", "isolates", "number_of_isolates"] + + +@nx._dispatchable +def is_isolate(G, n): + """Determines whether a node is an isolate. + + An *isolate* is a node with no neighbors (that is, with degree + zero). For directed graphs, this means no in-neighbors and no + out-neighbors. + + Parameters + ---------- + G : NetworkX graph + + n : node + A node in `G`. + + Returns + ------- + is_isolate : bool + True if and only if `n` has no neighbors. 
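For ``flow_hierarchy`` above, the fraction can be read off a small example: in a digraph made of a 3-cycle plus one tail edge, exactly one of the four edges lies outside every cycle. A sketch::

    import networkx as nx

    G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4)])
    print(nx.flow_hierarchy(G))  # 0.25: the edge (3, 4) is in no cycle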
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edge(1, 2)
+    >>> G.add_node(3)
+    >>> nx.is_isolate(G, 2)
+    False
+    >>> nx.is_isolate(G, 3)
+    True
+    """
+    return G.degree(n) == 0
+
+
+@nx._dispatchable
+def isolates(G):
+    """Iterator over isolates in the graph.
+
+    An *isolate* is a node with no neighbors (that is, with degree
+    zero). For directed graphs, this means no in-neighbors and no
+    out-neighbors.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    iterator
+        An iterator over the isolates of `G`.
+
+    Examples
+    --------
+    To get a list of all isolates of a graph, use the :class:`list`
+    constructor::
+
+        >>> G = nx.Graph()
+        >>> G.add_edge(1, 2)
+        >>> G.add_node(3)
+        >>> list(nx.isolates(G))
+        [3]
+
+    To remove all isolates in the graph, first create a list of the
+    isolates, then use :meth:`Graph.remove_nodes_from`::
+
+        >>> G.remove_nodes_from(list(nx.isolates(G)))
+        >>> list(G)
+        [1, 2]
+
+    For digraphs, isolates have zero in-degree and zero out-degree::
+
+        >>> G = nx.DiGraph([(0, 1), (1, 2)])
+        >>> G.add_node(3)
+        >>> list(nx.isolates(G))
+        [3]
+
+    """
+    return (n for n, d in G.degree() if d == 0)
+
+
+@nx._dispatchable
+def number_of_isolates(G):
+    """Returns the number of isolates in the graph.
+
+    An *isolate* is a node with no neighbors (that is, with degree
+    zero). For directed graphs, this means no in-neighbors and no
+    out-neighbors.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    int
+        The number of degree zero nodes in the graph `G`.
+
+    """
+    # TODO This can be parallelized.
+    return sum(1 for v in isolates(G))
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..58c22688660073a6abb59f7639871f711d1bd6ac
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py
@@ -0,0 +1,7 @@
+from networkx.algorithms.isomorphism.isomorph import *
+from networkx.algorithms.isomorphism.vf2userfunc import *
+from networkx.algorithms.isomorphism.matchhelpers import *
+from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
+from networkx.algorithms.isomorphism.ismags import *
+from networkx.algorithms.isomorphism.tree_isomorphism import *
+from networkx.algorithms.isomorphism.vf2pp import *
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py
new file mode 100644
index 0000000000000000000000000000000000000000..24819faf95cf42f8264960a4a6a27754301fc661
--- /dev/null
+++
b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py @@ -0,0 +1,1163 @@ +""" +ISMAGS Algorithm +================ + +Provides a Python implementation of the ISMAGS algorithm. [1]_ + +It is capable of finding (subgraph) isomorphisms between two graphs, taking the +symmetry of the subgraph into account. In most cases the VF2 algorithm is +faster (at least on small graphs) than this implementation, but in some cases +there is an exponential number of isomorphisms that are symmetrically +equivalent. In that case, the ISMAGS algorithm will provide only one solution +per symmetry group. + +>>> petersen = nx.petersen_graph() +>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen) +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False)) +>>> len(isomorphisms) +120 +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True)) +>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}] +>>> answer == isomorphisms +True + +In addition, this implementation also provides an interface to find the +largest common induced subgraph [2]_ between any two graphs, again taking +symmetry into account. Given `graph` and `subgraph` the algorithm will remove +nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of +`graph`. Since only the symmetry of `subgraph` is taken into account it is +worth thinking about how you provide your graphs: + +>>> graph1 = nx.path_graph(4) +>>> graph2 = nx.star_graph(3) +>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2) +>>> ismags.is_isomorphic() +False +>>> largest_common_subgraph = list(ismags.largest_common_subgraph()) +>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}] +>>> answer == largest_common_subgraph +True +>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1) +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph()) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... ] +>>> answer == largest_common_subgraph +True + +However, when not taking symmetry into account, it doesn't matter: + +>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 2: 1, 0: 2}, +... {2: 0, 1: 1, 3: 2}, +... {2: 0, 3: 1, 1: 2}, +... {1: 0, 0: 1, 2: 3}, +... {1: 0, 2: 1, 0: 3}, +... {2: 0, 1: 1, 3: 3}, +... {2: 0, 3: 1, 1: 3}, +... {1: 0, 0: 2, 2: 3}, +... {1: 0, 2: 2, 0: 3}, +... {2: 0, 1: 2, 3: 3}, +... {2: 0, 3: 2, 1: 3}, +... ] +>>> answer == largest_common_subgraph +True +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... {1: 1, 0: 2, 2: 3}, +... {1: 1, 0: 2, 3: 3}, +... {2: 1, 0: 2, 1: 3}, +... {2: 1, 0: 2, 3: 3}, +... {3: 1, 0: 2, 1: 3}, +... {3: 1, 0: 2, 2: 3}, +... ] +>>> answer == largest_common_subgraph +True + +Notes +----- +- The current implementation works for undirected graphs only. The algorithm + in general should work for directed graphs as well though. +- Node keys for both provided graphs need to be fully orderable as well as + hashable. +- Node and edge equality is assumed to be transitive: if A is equal to B, and + B is equal to C, then A is equal to C. + +References +---------- +.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle, + M. 
Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+   Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+   Enumeration", PLoS One 9(5): e97896, 2014.
+   https://doi.org/10.1371/journal.pone.0097896
+.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
+"""
+
+__all__ = ["ISMAGS"]
+
+import itertools
+from collections import Counter, defaultdict
+from functools import reduce, wraps
+
+
+def are_all_equal(iterable):
+    """
+    Returns ``True`` if and only if all elements in `iterable` are equal, and
+    ``False`` otherwise.
+
+    Parameters
+    ----------
+    iterable: collections.abc.Iterable
+        The container whose elements will be checked.
+
+    Returns
+    -------
+    bool
+        ``True`` iff all elements in `iterable` compare equal, ``False``
+        otherwise.
+    """
+    try:
+        shape = iterable.shape
+    except AttributeError:
+        pass
+    else:
+        if len(shape) > 1:
+            message = "The function does not work on multidimensional arrays."
+            raise NotImplementedError(message) from None
+
+    iterator = iter(iterable)
+    first = next(iterator, None)
+    return all(item == first for item in iterator)
+
+
+def make_partitions(items, test):
+    """
+    Partitions items into sets based on the outcome of ``test(item1, item2)``.
+    Pairs of items for which `test` returns `True` end up in the same set.
+
+    Parameters
+    ----------
+    items : collections.abc.Iterable[collections.abc.Hashable]
+        Items to partition
+    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
+        A function that will be called with 2 arguments, taken from items.
+        Should return `True` if those 2 items need to end up in the same
+        partition, and `False` otherwise.
+
+    Returns
+    -------
+    list[set]
+        A list of sets, with each set containing part of the items in `items`,
+        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
+        == True``
+
+    Notes
+    -----
+    The function `test` is assumed to be transitive: if ``test(a, b)`` and
+    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
+    """
+    partitions = []
+    for item in items:
+        for partition in partitions:
+            p_item = next(iter(partition))
+            if test(item, p_item):
+                partition.add(item)
+                break
+        else:  # No break
+            partitions.append({item})
+    return partitions
+
+
+def partition_to_color(partitions):
+    """
+    Creates a dictionary that maps each item in each partition to the index of
+    the partition to which it belongs.
+
+    Parameters
+    ----------
+    partitions: collections.abc.Sequence[collections.abc.Iterable]
+        As returned by :func:`make_partitions`.
+
+    Returns
+    -------
+    dict
+    """
+    colors = {}
+    for color, keys in enumerate(partitions):
+        for key in keys:
+            colors[key] = color
+    return colors
+
+
+def intersect(collection_of_sets):
+    """
+    Given a collection of sets, returns the intersection of those sets.
+
+    Parameters
+    ----------
+    collection_of_sets: collections.abc.Collection[set]
+        A collection of sets.
+
+    Returns
+    -------
+    set
+        An intersection of all sets in `collection_of_sets`. Will have the same
+        type as the item initially taken from `collection_of_sets`.
+    """
+    collection_of_sets = list(collection_of_sets)
+    first = collection_of_sets.pop()
+    out = reduce(set.intersection, collection_of_sets, set(first))
+    return type(first)(out)
+
+
+class ISMAGS:
+    """
+    Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
+    "Index-based Subgraph Matching Algorithm with General Symmetries". As the
+    name implies, it is symmetry aware and will only generate non-symmetric
+    isomorphisms.
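The ``make_partitions``/``partition_to_color`` helpers above implement the transitive grouping the class relies on. A small standalone sketch, grouping nodes by degree (this imports the module-level helpers directly, which is not part of the public API)::

    import networkx as nx
    from networkx.algorithms.isomorphism.ismags import (
        make_partitions,
        partition_to_color,
    )

    G = nx.star_graph(3)  # hub of degree 3, three leaves of degree 1
    parts = make_partitions(G.nodes, lambda n1, n2: G.degree(n1) == G.degree(n2))
    print(parts)  # [{0}, {1, 2, 3}]
    print(partition_to_color(parts))  # {0: 0, 1: 1, 2: 1, 3: 1}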
+
+    Notes
+    -----
+    The implementation imposes additional conditions compared to the VF2
+    algorithm on the graphs provided and the comparison functions
+    (:attr:`node_equality` and :attr:`edge_equality`):
+
+    - Node keys in both graphs must be orderable as well as hashable.
+    - Equality must be transitive: if A is equal to B, and B is equal to C,
+      then A must be equal to C.
+
+    Attributes
+    ----------
+    graph: networkx.Graph
+    subgraph: networkx.Graph
+    node_equality: collections.abc.Callable
+        The function called to see if two nodes should be considered equal.
+        Its signature looks like this:
+        ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
+        `node1` is a node in `graph1`, and `node2` a node in `graph2`.
+        Constructed from the argument `node_match`.
+    edge_equality: collections.abc.Callable
+        The function called to see if two edges should be considered equal.
+        Its signature looks like this:
+        ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
+        `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
+        Constructed from the argument `edge_match`.
+
+    References
+    ----------
+    .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+       M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+       Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+       Enumeration", PLoS One 9(5): e97896, 2014.
+       https://doi.org/10.1371/journal.pone.0097896
+    """
+
+    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
+        """
+        Parameters
+        ----------
+        graph: networkx.Graph
+        subgraph: networkx.Graph
+        node_match: collections.abc.Callable or None
+            Function used to determine whether two nodes are equivalent. Its
+            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
+            `n1` and `n2` node property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
+            friends.
+            If `None`, all nodes are considered equal.
+        edge_match: collections.abc.Callable or None
+            Function used to determine whether two edges are equivalent. Its
+            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
+            `e1` and `e2` edge property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
+            friends.
+            If `None`, all edges are considered equal.
+        cache: collections.abc.Mapping
+            A cache used for caching graph symmetries.
+        """
+        # TODO: graph and subgraph setter methods that invalidate the caches.
+        # TODO: allow for precomputed partitions and colors
+        self.graph = graph
+        self.subgraph = subgraph
+        self._symmetry_cache = cache
+        # Naming conventions are taken from the original paper. For your
+        # sanity:
+        #   sg: subgraph
+        #   g: graph
+        #   e: edge(s)
+        #   n: node(s)
+        # So: sgn means "subgraph nodes".
+ self._sgn_partitions_ = None + self._sge_partitions_ = None + + self._sgn_colors_ = None + self._sge_colors_ = None + + self._gn_partitions_ = None + self._ge_partitions_ = None + + self._gn_colors_ = None + self._ge_colors_ = None + + self._node_compat_ = None + self._edge_compat_ = None + + if node_match is None: + self.node_equality = self._node_match_maker(lambda n1, n2: True) + self._sgn_partitions_ = [set(self.subgraph.nodes)] + self._gn_partitions_ = [set(self.graph.nodes)] + self._node_compat_ = {0: 0} + else: + self.node_equality = self._node_match_maker(node_match) + if edge_match is None: + self.edge_equality = self._edge_match_maker(lambda e1, e2: True) + self._sge_partitions_ = [set(self.subgraph.edges)] + self._ge_partitions_ = [set(self.graph.edges)] + self._edge_compat_ = {0: 0} + else: + self.edge_equality = self._edge_match_maker(edge_match) + + @property + def _sgn_partitions(self): + if self._sgn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.subgraph, node1, self.subgraph, node2) + + self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch) + return self._sgn_partitions_ + + @property + def _sge_partitions(self): + if self._sge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2) + + self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch) + return self._sge_partitions_ + + @property + def _gn_partitions(self): + if self._gn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.graph, node1, self.graph, node2) + + self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch) + return self._gn_partitions_ + + @property + def _ge_partitions(self): + if self._ge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.graph, edge1, self.graph, edge2) + + self._ge_partitions_ = make_partitions(self.graph.edges, edgematch) + return self._ge_partitions_ + + @property + def _sgn_colors(self): + if self._sgn_colors_ is None: + self._sgn_colors_ = partition_to_color(self._sgn_partitions) + return self._sgn_colors_ + + @property + def _sge_colors(self): + if self._sge_colors_ is None: + self._sge_colors_ = partition_to_color(self._sge_partitions) + return self._sge_colors_ + + @property + def _gn_colors(self): + if self._gn_colors_ is None: + self._gn_colors_ = partition_to_color(self._gn_partitions) + return self._gn_colors_ + + @property + def _ge_colors(self): + if self._ge_colors_ is None: + self._ge_colors_ = partition_to_color(self._ge_partitions) + return self._ge_colors_ + + @property + def _node_compatibility(self): + if self._node_compat_ is not None: + return self._node_compat_ + self._node_compat_ = {} + for sgn_part_color, gn_part_color in itertools.product( + range(len(self._sgn_partitions)), range(len(self._gn_partitions)) + ): + sgn = next(iter(self._sgn_partitions[sgn_part_color])) + gn = next(iter(self._gn_partitions[gn_part_color])) + if self.node_equality(self.subgraph, sgn, self.graph, gn): + self._node_compat_[sgn_part_color] = gn_part_color + return self._node_compat_ + + @property + def _edge_compatibility(self): + if self._edge_compat_ is not None: + return self._edge_compat_ + self._edge_compat_ = {} + for sge_part_color, ge_part_color in itertools.product( + range(len(self._sge_partitions)), range(len(self._ge_partitions)) + ): + sge = next(iter(self._sge_partitions[sge_part_color])) + ge = next(iter(self._ge_partitions[ge_part_color])) + if 
self.edge_equality(self.subgraph, sge, self.graph, ge): + self._edge_compat_[sge_part_color] = ge_part_color + return self._edge_compat_ + + @staticmethod + def _node_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, node1, graph2, node2): + return cmp(graph1.nodes[node1], graph2.nodes[node2]) + + return comparer + + @staticmethod + def _edge_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, edge1, graph2, edge2): + return cmp(graph1.edges[edge1], graph2.edges[edge2]) + + return comparer + + def find_isomorphisms(self, symmetry=True): + """Find all subgraph isomorphisms between subgraph and graph + + Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`. + + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + isomorphisms may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + elif len(self.graph) < len(self.subgraph): + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + la_candidates = self._get_lookahead_candidates() + for sgn in self.subgraph: + extra_candidates = la_candidates[sgn] + if extra_candidates: + candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)} + + if any(candidates.values()): + start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len)) + candidates[start_sgn] = (intersect(candidates[start_sgn]),) + yield from self._map_nodes(start_sgn, candidates, constraints) + else: + return + + @staticmethod + def _find_neighbor_color_count(graph, node, node_color, edge_color): + """ + For `node` in `graph`, count the number of edges of a specific color + it has to nodes of a specific color. + """ + counts = Counter() + neighbors = graph[node] + for neighbor in neighbors: + n_color = node_color[neighbor] + if (node, neighbor) in edge_color: + e_color = edge_color[node, neighbor] + else: + e_color = edge_color[neighbor, node] + counts[e_color, n_color] += 1 + return counts + + def _get_lookahead_candidates(self): + """ + Returns a mapping of {subgraph node: collection of graph nodes} for + which the graph nodes are feasible candidates for the subgraph node, as + determined by looking ahead one edge. + """ + g_counts = {} + for gn in self.graph: + g_counts[gn] = self._find_neighbor_color_count( + self.graph, gn, self._gn_colors, self._ge_colors + ) + candidates = defaultdict(set) + for sgn in self.subgraph: + sg_count = self._find_neighbor_color_count( + self.subgraph, sgn, self._sgn_colors, self._sge_colors + ) + new_sg_count = Counter() + for (sge_color, sgn_color), count in sg_count.items(): + try: + ge_color = self._edge_compatibility[sge_color] + gn_color = self._node_compatibility[sgn_color] + except KeyError: + pass + else: + new_sg_count[ge_color, gn_color] = count + + for gn, g_count in g_counts.items(): + if all(new_sg_count[x] <= g_count[x] for x in new_sg_count): + # Valid candidate + candidates[sgn].add(gn) + return candidates + + def largest_common_subgraph(self, symmetry=True): + """ + Find the largest common induced subgraphs between :attr:`subgraph` and + :attr:`graph`. 
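The effect of the ``symmetry`` flag in ``find_isomorphisms`` above is easiest to see on a graph with many automorphisms. A sketch using a 4-cycle, whose automorphism group has order 8::

    import networkx as nx

    cycle = nx.cycle_graph(4)
    ismags = nx.isomorphism.ISMAGS(cycle, cycle)
    print(len(list(ismags.find_isomorphisms(symmetry=False))))  # 8
    print(len(list(ismags.find_isomorphisms(symmetry=True))))  # 1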
+ + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + largest common subgraphs may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + + if any(candidates.values()): + yield from self._largest_common_subgraph(candidates, constraints) + else: + return + + def analyze_symmetry(self, graph, node_partitions, edge_colors): + """ + Find a minimal set of permutations and corresponding co-sets that + describe the symmetry of `graph`, given the node and edge equalities + given by `node_partitions` and `edge_colors`, respectively. + + Parameters + ---------- + graph : networkx.Graph + The graph whose symmetry should be analyzed. + node_partitions : list of sets + A list of sets containing node keys. Node keys in the same set + are considered equivalent. Every node key in `graph` should be in + exactly one of the sets. If all nodes are equivalent, this should + be ``[set(graph.nodes)]``. + edge_colors : dict mapping edges to their colors + A dict mapping every edge in `graph` to its corresponding color. + Edges with the same color are considered equivalent. If all edges + are equivalent, this should be ``{e: 0 for e in graph.edges}``. + + + Returns + ------- + set[frozenset] + The found permutations. This is a set of frozensets of pairs of node + keys which can be exchanged without changing :attr:`subgraph`. + dict[collections.abc.Hashable, set[collections.abc.Hashable]] + The found co-sets. The co-sets is a dictionary of + ``{node key: set of node keys}``. + Every key-value pair describes which ``values`` can be interchanged + without changing nodes less than ``key``. + """ + if self._symmetry_cache is not None: + key = hash( + ( + tuple(graph.nodes), + tuple(graph.edges), + tuple(map(tuple, node_partitions)), + tuple(edge_colors.items()), + ) + ) + if key in self._symmetry_cache: + return self._symmetry_cache[key] + node_partitions = list( + self._refine_node_partitions(graph, node_partitions, edge_colors) + ) + assert len(node_partitions) == 1 + node_partitions = node_partitions[0] + permutations, cosets = self._process_ordered_pair_partitions( + graph, node_partitions, node_partitions, edge_colors + ) + if self._symmetry_cache is not None: + self._symmetry_cache[key] = permutations, cosets + return permutations, cosets + + def is_isomorphic(self, symmetry=False): + """ + Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and + False otherwise. + + Returns + ------- + bool + """ + return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic( + symmetry + ) + + def subgraph_is_isomorphic(self, symmetry=False): + """ + Returns True if a subgraph of :attr:`graph` is isomorphic to + :attr:`subgraph` and False otherwise. + + Returns + ------- + bool + """ + # symmetry=False, since we only need to know whether there is any + # example; figuring out all symmetry elements probably costs more time + # than it gains. 
+ isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None) + return isom is not None + + def isomorphisms_iter(self, symmetry=True): + """ + Does the same as :meth:`find_isomorphisms` if :attr:`graph` and + :attr:`subgraph` have the same number of nodes. + """ + if len(self.graph) == len(self.subgraph): + yield from self.subgraph_isomorphisms_iter(symmetry=symmetry) + + def subgraph_isomorphisms_iter(self, symmetry=True): + """Alternative name for :meth:`find_isomorphisms`.""" + return self.find_isomorphisms(symmetry) + + def _find_nodecolor_candidates(self): + """ + Per node in subgraph find all nodes in graph that have the same color. + """ + candidates = defaultdict(set) + for sgn in self.subgraph.nodes: + sgn_color = self._sgn_colors[sgn] + if sgn_color in self._node_compatibility: + gn_color = self._node_compatibility[sgn_color] + candidates[sgn].add(frozenset(self._gn_partitions[gn_color])) + else: + candidates[sgn].add(frozenset()) + candidates = dict(candidates) + for sgn, options in candidates.items(): + candidates[sgn] = frozenset(options) + return candidates + + @staticmethod + def _make_constraints(cosets): + """ + Turn cosets into constraints. + """ + constraints = [] + for node_i, node_ts in cosets.items(): + for node_t in node_ts: + if node_i != node_t: + # Node i must be smaller than node t. + constraints.append((node_i, node_t)) + return constraints + + @staticmethod + def _find_node_edge_color(graph, node_colors, edge_colors): + """ + For every node in graph, come up with a color that combines 1) the + color of the node, and 2) the number of edges of a color to each type + of node. + """ + counts = defaultdict(lambda: defaultdict(int)) + for node1, node2 in graph.edges: + if (node1, node2) in edge_colors: + # FIXME directed graphs + ecolor = edge_colors[node1, node2] + else: + ecolor = edge_colors[node2, node1] + # Count per node how many edges it has of what color to nodes of + # what color + counts[node1][ecolor, node_colors[node2]] += 1 + counts[node2][ecolor, node_colors[node1]] += 1 + + node_edge_colors = {} + for node in graph.nodes: + node_edge_colors[node] = node_colors[node], set(counts[node].items()) + + return node_edge_colors + + @staticmethod + def _get_permutations_by_length(items): + """ + Get all permutations of items, but only permute items with the same + length. + + >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]])) + >>> answer = [ + ... (([1], [2]), ([3, 4], [4, 5])), + ... (([1], [2]), ([4, 5], [3, 4])), + ... (([2], [1]), ([3, 4], [4, 5])), + ... (([2], [1]), ([4, 5], [3, 4])), + ... ] + >>> found == answer + True + """ + by_len = defaultdict(list) + for item in items: + by_len[len(item)].append(item) + + yield from itertools.product( + *(itertools.permutations(by_len[l]) for l in sorted(by_len)) + ) + + @classmethod + def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False): + """ + Given a partition of nodes in graph, make the partitions smaller such + that all nodes in a partition have 1) the same color, and 2) the same + number of edges to specific other partitions. 
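This refinement is the classic colour-refinement (1-WL) step. A standalone sketch of a single round, independent of the class machinery (``refine_once`` is a hypothetical helper written for illustration, not part of this module)::

    from collections import Counter

    import networkx as nx

    def refine_once(G, colors):
        # New colour = old colour plus the multiset of neighbour colours.
        sigs = {
            n: (colors[n], tuple(sorted(Counter(colors[m] for m in G[n]).items())))
            for n in G
        }
        relabel = {s: i for i, s in enumerate(sorted(set(sigs.values())))}
        return {n: relabel[sigs[n]] for n in G}

    G = nx.path_graph(4)
    # Starting from a single colour class, the endpoints split off:
    print(refine_once(G, {n: 0 for n in G}))  # {0: 0, 1: 1, 2: 1, 3: 0}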
+ """ + + def equal_color(node1, node2): + return node_edge_colors[node1] == node_edge_colors[node2] + + node_partitions = list(node_partitions) + node_colors = partition_to_color(node_partitions) + node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors) + if all( + are_all_equal(node_edge_colors[node] for node in partition) + for partition in node_partitions + ): + yield node_partitions + return + + new_partitions = [] + output = [new_partitions] + for partition in node_partitions: + if not are_all_equal(node_edge_colors[node] for node in partition): + refined = make_partitions(partition, equal_color) + if ( + branch + and len(refined) != 1 + and len({len(r) for r in refined}) != len([len(r) for r in refined]) + ): + # This is where it breaks. There are multiple new cells + # in refined with the same length, and their order + # matters. + # So option 1) Hit it with a big hammer and simply make all + # orderings. + permutations = cls._get_permutations_by_length(refined) + new_output = [] + for n_p in output: + for permutation in permutations: + new_output.append(n_p + list(permutation[0])) + output = new_output + else: + for n_p in output: + n_p.extend(sorted(refined, key=len)) + else: + for n_p in output: + n_p.append(partition) + for n_p in output: + yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch) + + def _edges_of_same_color(self, sgn1, sgn2): + """ + Returns all edges in :attr:`graph` that have the same colour as the + edge between sgn1 and sgn2 in :attr:`subgraph`. + """ + if (sgn1, sgn2) in self._sge_colors: + # FIXME directed graphs + sge_color = self._sge_colors[sgn1, sgn2] + else: + sge_color = self._sge_colors[sgn2, sgn1] + if sge_color in self._edge_compatibility: + ge_color = self._edge_compatibility[sge_color] + g_edges = self._ge_partitions[ge_color] + else: + g_edges = [] + return g_edges + + def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None): + """ + Find all subgraph isomorphisms honoring constraints. + """ + if mapping is None: + mapping = {} + else: + mapping = mapping.copy() + if to_be_mapped is None: + to_be_mapped = set(self.subgraph.nodes) + + # Note, we modify candidates here. Doesn't seem to affect results, but + # remember this. + # candidates = candidates.copy() + sgn_candidates = intersect(candidates[sgn]) + candidates[sgn] = frozenset([sgn_candidates]) + for gn in sgn_candidates: + # We're going to try to map sgn to gn. + if gn in mapping.values() or sgn not in to_be_mapped: + # gn is already mapped to something + continue # pragma: no cover + + # REDUCTION and COMBINATION + mapping[sgn] = gn + # BASECASE + if to_be_mapped == set(mapping.keys()): + yield {v: k for k, v in mapping.items()} + continue + left_to_map = to_be_mapped - set(mapping.keys()) + + new_candidates = candidates.copy() + sgn_nbrs = set(self.subgraph[sgn]) + not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn]) + for sgn2 in left_to_map: + if sgn2 not in sgn_nbrs: + gn2_options = not_gn_nbrs + else: + # Get all edges to gn of the right color: + g_edges = self._edges_of_same_color(sgn, sgn2) + # FIXME directed graphs + # And all nodes involved in those which are connected to gn + gn2_options = {n for e in g_edges for n in e if gn in e} + # Node color compatibility should be taken care of by the + # initial candidate lists made by find_subgraphs + + # Add gn2_options to the right collection. Since new_candidates + # is a dict of frozensets of frozensets of node indices it's + # a bit clunky. 
We can't do .add, and + also doesn't work. We
+                # could do |, but I deem union to be clearer.
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+                if (sgn, sgn2) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
+                elif (sgn2, sgn) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
+                else:
+                    continue  # pragma: no cover
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+            # The next node is the one that is unmapped and has fewest
+            # candidates
+            next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
+            yield from self._map_nodes(
+                next_sgn,
+                new_candidates,
+                constraints,
+                mapping=mapping,
+                to_be_mapped=to_be_mapped,
+            )
+            # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
+            # when making a new mapping for sgn.
+            # del mapping[sgn]
+
+    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
+        """
+        Find all largest common subgraphs honoring constraints.
+        """
+        if to_be_mapped is None:
+            to_be_mapped = {frozenset(self.subgraph.nodes)}
+
+        # The LCS problem is basically a repeated subgraph isomorphism problem
+        # with smaller and smaller subgraphs. We store the nodes that are
+        # "part of" the subgraph in to_be_mapped, and we make it a little
+        # smaller every iteration.
+
+        current_size = len(next(iter(to_be_mapped), []))
+
+        found_iso = False
+        if current_size <= len(self.graph):
+            # There's no point in trying to find isomorphisms of
+            # graph >= subgraph if subgraph has more nodes than graph.
+
+            # Try the isomorphism first with the nodes with lowest ID. So sort
+            # them. Those are more likely to be part of the final
+            # correspondence. This makes finding the first answer(s) faster. In
+            # theory.
+            for nodes in sorted(to_be_mapped, key=sorted):
+                # Find the isomorphism between subgraph[to_be_mapped] <= graph
+                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
+                isomorphs = self._map_nodes(
+                    next_sgn, candidates, constraints, to_be_mapped=nodes
+                )
+
+                # This is effectively `yield from isomorphs`, except that we look
+                # whether an item was yielded.
+                try:
+                    item = next(isomorphs)
+                except StopIteration:
+                    pass
+                else:
+                    yield item
+                    yield from isomorphs
+                    found_iso = True
+
+        # BASECASE
+        if found_iso or current_size == 1:
+            # Shrinking has no point because either 1) we end up with a smaller
+            # common subgraph (and we want the largest), or 2) there'll be no
+            # more subgraph.
+            return
+
+        left_to_be_mapped = set()
+        for nodes in to_be_mapped:
+            for sgn in nodes:
+                # We're going to remove sgn from to_be_mapped, but subject to
+                # symmetry constraints. We know that for every constraint we
+                # have those subgraph nodes are equal. So whenever we would
+                # remove the lower part of a constraint, remove the higher
+                # instead. This is all dealt with by _remove_node. And because
+                # left_to_be_mapped is a set, we don't do double work.
+
+                # And finally, make the subgraph one node smaller.
+                # REDUCTION
+                new_nodes = self._remove_node(sgn, nodes, constraints)
+                left_to_be_mapped.add(new_nodes)
+        # COMBINATION
+        yield from self._largest_common_subgraph(
+            candidates, constraints, to_be_mapped=left_to_be_mapped
+        )
+
+    @staticmethod
+    def _remove_node(node, nodes, constraints):
+        """
+        Returns a new set where node has been removed from nodes, subject to
+        symmetry constraints. We know that for every constraint we have
+        those subgraph nodes are equal.
So whenever we would remove the + lower part of a constraint, remove the higher instead. + """ + while True: + for low, high in constraints: + if low == node and high in nodes: + node = high + break + else: # no break, couldn't find node in constraints + break + return frozenset(nodes - {node}) + + @staticmethod + def _find_permutations(top_partitions, bottom_partitions): + """ + Return the pairs of top/bottom partitions where the partitions are + different. Ensures that all partitions in both top and bottom + partitions have size 1. + """ + # Find permutations + permutations = set() + for top, bot in zip(top_partitions, bottom_partitions): + # top and bot have only one element + if len(top) != 1 or len(bot) != 1: + raise IndexError( + "Not all nodes are coupled. This is" + f" impossible: {top_partitions}, {bottom_partitions}" + ) + if top != bot: + permutations.add(frozenset((next(iter(top)), next(iter(bot))))) + return permutations + + @staticmethod + def _update_orbits(orbits, permutations): + """ + Update orbits based on permutations. Orbits is modified in place. + For every pair of items in permutations their respective orbits are + merged. + """ + for permutation in permutations: + node, node2 = permutation + # Find the orbits that contain node and node2, and replace the + # orbit containing node with the union + first = second = None + for idx, orbit in enumerate(orbits): + if first is not None and second is not None: + break + if node in orbit: + first = idx + if node2 in orbit: + second = idx + if first != second: + orbits[first].update(orbits[second]) + del orbits[second] + + def _couple_nodes( + self, + top_partitions, + bottom_partitions, + pair_idx, + t_node, + b_node, + graph, + edge_colors, + ): + """ + Generate new partitions from top and bottom_partitions where t_node is + coupled to b_node. pair_idx is the index of the partitions where t_ and + b_node can be found. + """ + t_partition = top_partitions[pair_idx] + b_partition = bottom_partitions[pair_idx] + assert t_node in t_partition and b_node in b_partition + # Couple node to node2. This means they get their own partition + new_top_partitions = [top.copy() for top in top_partitions] + new_bottom_partitions = [bot.copy() for bot in bottom_partitions] + new_t_groups = {t_node}, t_partition - {t_node} + new_b_groups = {b_node}, b_partition - {b_node} + # Replace the old partitions with the coupled ones + del new_top_partitions[pair_idx] + del new_bottom_partitions[pair_idx] + new_top_partitions[pair_idx:pair_idx] = new_t_groups + new_bottom_partitions[pair_idx:pair_idx] = new_b_groups + + new_top_partitions = self._refine_node_partitions( + graph, new_top_partitions, edge_colors + ) + new_bottom_partitions = self._refine_node_partitions( + graph, new_bottom_partitions, edge_colors, branch=True + ) + new_top_partitions = list(new_top_partitions) + assert len(new_top_partitions) == 1 + new_top_partitions = new_top_partitions[0] + for bot in new_bottom_partitions: + yield list(new_top_partitions), bot + + def _process_ordered_pair_partitions( + self, + graph, + top_partitions, + bottom_partitions, + edge_colors, + orbits=None, + cosets=None, + ): + """ + Processes ordered pair partitions as per the reference paper. Finds and + returns all permutations and cosets that leave the graph unchanged. + """ + if orbits is None: + orbits = [{node} for node in graph.nodes] + else: + # Note that we don't copy orbits when we are given one. This means + # we leak information between the recursive branches. This is + # intentional! 
+ orbits = orbits + if cosets is None: + cosets = {} + else: + cosets = cosets.copy() + + assert all( + len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions) + ) + + # BASECASE + if all(len(top) == 1 for top in top_partitions): + # All nodes are mapped + permutations = self._find_permutations(top_partitions, bottom_partitions) + self._update_orbits(orbits, permutations) + if permutations: + return [permutations], cosets + else: + return [], cosets + + permutations = [] + unmapped_nodes = { + (node, idx) + for idx, t_partition in enumerate(top_partitions) + for node in t_partition + if len(t_partition) > 1 + } + node, pair_idx = min(unmapped_nodes) + b_partition = bottom_partitions[pair_idx] + + for node2 in sorted(b_partition): + if len(b_partition) == 1: + # Can never result in symmetry + continue + if node != node2 and any( + node in orbit and node2 in orbit for orbit in orbits + ): + # Orbit prune branch + continue + # REDUCTION + # Couple node to node2 + partitions = self._couple_nodes( + top_partitions, + bottom_partitions, + pair_idx, + node, + node2, + graph, + edge_colors, + ) + for opp in partitions: + new_top_partitions, new_bottom_partitions = opp + + new_perms, new_cosets = self._process_ordered_pair_partitions( + graph, + new_top_partitions, + new_bottom_partitions, + edge_colors, + orbits, + cosets, + ) + # COMBINATION + permutations += new_perms + cosets.update(new_cosets) + + mapped = { + k + for top, bottom in zip(top_partitions, bottom_partitions) + for k in top + if len(top) == 1 and top == bottom + } + ks = {k for k in graph.nodes if k < node} + # Have all nodes with ID < node been mapped? + find_coset = ks <= mapped and node not in cosets + if find_coset: + # Find the orbit that contains node + for orbit in orbits: + if node in orbit: + cosets[node] = orbit.copy() + return permutations, cosets diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py new file mode 100644 index 0000000000000000000000000000000000000000..00395b71cc1fafb19d7809ba1d598cdc040e36f8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py @@ -0,0 +1,248 @@ +""" +Graph isomorphism functions. +""" +import networkx as nx +from networkx.exception import NetworkXError + +__all__ = [ + "could_be_isomorphic", + "fast_could_be_isomorphic", + "faster_could_be_isomorphic", + "is_isomorphic", +] + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def could_be_isomorphic(G1, G2): + """Returns False if graphs are definitely not isomorphic. + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs G1 and G2 must be the same type. + + Notes + ----- + Checks for matching degree, triangle, and number of cliques sequences. + The triangle sequence contains the number of triangles each node is part of. + The clique sequence contains for each node the number of maximal cliques + involving that node. 
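The three levels of invariant checks in this module differ in strength; on two 2-regular graphs of order six they disagree, since only the triangle counts tell a 6-cycle apart from two disjoint triangles. A sketch::

    import networkx as nx

    G1 = nx.cycle_graph(6)
    G2 = nx.disjoint_union(nx.complete_graph(3), nx.complete_graph(3))
    print(nx.faster_could_be_isomorphic(G1, G2))  # True: degree sequences match
    print(nx.fast_could_be_isomorphic(G1, G2))  # False: triangle counts differ
    print(nx.is_isomorphic(G1, G2))  # False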
+ + """ + + # Check global properties + if G1.order() != G2.order(): + return False + + # Check local properties + d1 = G1.degree() + t1 = nx.triangles(G1) + clqs_1 = list(nx.find_cliques(G1)) + c1 = {n: sum(1 for c in clqs_1 if n in c) for n in G1} # number of cliques + props1 = [[d, t1[v], c1[v]] for v, d in d1] + props1.sort() + + d2 = G2.degree() + t2 = nx.triangles(G2) + clqs_2 = list(nx.find_cliques(G2)) + c2 = {n: sum(1 for c in clqs_2 if n in c) for n in G2} # number of cliques + props2 = [[d, t2[v], c2[v]] for v, d in d2] + props2.sort() + + if props1 != props2: + return False + + # OK... + return True + + +graph_could_be_isomorphic = could_be_isomorphic + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def fast_could_be_isomorphic(G1, G2): + """Returns False if graphs are definitely not isomorphic. + + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs G1 and G2 must be the same type. + + Notes + ----- + Checks for matching degree and triangle sequences. The triangle + sequence contains the number of triangles each node is part of. + """ + # Check global properties + if G1.order() != G2.order(): + return False + + # Check local properties + d1 = G1.degree() + t1 = nx.triangles(G1) + props1 = [[d, t1[v]] for v, d in d1] + props1.sort() + + d2 = G2.degree() + t2 = nx.triangles(G2) + props2 = [[d, t2[v]] for v, d in d2] + props2.sort() + + if props1 != props2: + return False + + # OK... + return True + + +fast_graph_could_be_isomorphic = fast_could_be_isomorphic + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def faster_could_be_isomorphic(G1, G2): + """Returns False if graphs are definitely not isomorphic. + + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs G1 and G2 must be the same type. + + Notes + ----- + Checks for matching degree sequences. + """ + # Check global properties + if G1.order() != G2.order(): + return False + + # Check local properties + d1 = sorted(d for n, d in G1.degree()) + d2 = sorted(d for n, d in G2.degree()) + + if d1 != d2: + return False + + # OK... + return True + + +faster_graph_could_be_isomorphic = faster_could_be_isomorphic + + +@nx._dispatchable( + graphs={"G1": 0, "G2": 1}, + preserve_edge_attrs="edge_match", + preserve_node_attrs="node_match", +) +def is_isomorphic(G1, G2, node_match=None, edge_match=None): + """Returns True if the graphs G1 and G2 are isomorphic and False otherwise. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 should + be considered equal during the isomorphism test. + If node_match is not specified then node attributes are not considered. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute dictionaries + for n1 and n2 as inputs. + + edge_match : callable + A function that returns True if the edge attribute dictionary + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during the isomorphism test. If edge_match is + not specified then edge attributes are not considered. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. + + Notes + ----- + Uses the vf2 algorithm [1]_. 
+ + Examples + -------- + >>> import networkx.algorithms.isomorphism as iso + + For digraphs G1 and G2, using 'weight' edge attribute (default: 1) + + >>> G1 = nx.DiGraph() + >>> G2 = nx.DiGraph() + >>> nx.add_path(G1, [1, 2, 3, 4], weight=1) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=2) + >>> em = iso.numerical_edge_match("weight", 1) + >>> nx.is_isomorphic(G1, G2) # no weights considered + True + >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights + False + + For multidigraphs G1 and G2, using 'fill' node attribute (default: '') + + >>> G1 = nx.MultiDiGraph() + >>> G2 = nx.MultiDiGraph() + >>> G1.add_nodes_from([1, 2, 3], fill="red") + >>> G2.add_nodes_from([10, 20, 30, 40], fill="red") + >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=3) + >>> nm = iso.categorical_node_match("fill", "red") + >>> nx.is_isomorphic(G1, G2, node_match=nm) + True + + For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7) + + >>> G1.add_edge(1, 2, weight=7) + 1 + >>> G2.add_edge(10, 20) + 1 + >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6) + >>> nx.is_isomorphic(G1, G2, edge_match=em) + True + + For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes + with default values 7 and 2.5. Also using 'fill' node attribute with + default value 'red'. + + >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5]) + >>> nm = iso.categorical_node_match("fill", "red") + >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm) + True + + See Also + -------- + numerical_node_match, numerical_edge_match, numerical_multiedge_match + categorical_node_match, categorical_edge_match, categorical_multiedge_match + + References + ---------- + .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, + "An Improved Algorithm for Matching Large Graphs", + 3rd IAPR-TC15 Workshop on Graph-based Representations in + Pattern Recognition, Cuen, pp. 149-159, 2001. + https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs + """ + if G1.is_directed() and G2.is_directed(): + GM = nx.algorithms.isomorphism.DiGraphMatcher + elif (not G1.is_directed()) and (not G2.is_directed()): + GM = nx.algorithms.isomorphism.GraphMatcher + else: + raise NetworkXError("Graphs G1 and G2 are not of the same type.") + + gm = GM(G1, G2, node_match=node_match, edge_match=edge_match) + + return gm.is_isomorphic() diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py new file mode 100644 index 0000000000000000000000000000000000000000..1b6cc7d75b186cd793ae65cbdd00ccbeb25bd27c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py @@ -0,0 +1,1065 @@ +""" +************* +VF2 Algorithm +************* + +An implementation of VF2 algorithm for graph isomorphism testing. + +The simplest interface to use this module is to call the +:func:`is_isomorphic ` +function. + +Introduction +------------ + +The GraphMatcher and DiGraphMatcher are responsible for matching +graphs or directed graphs in a predetermined manner. This +usually means a check for an isomorphism, though other checks +are also possible. For example, a subgraph of one graph +can be checked for isomorphism to a second graph. + +Matching is done via syntactic feasibility. It is also possible +to check for semantic feasibility. 
Feasibility, then, is defined
+as the logical AND of the two functions.
+
+To include a semantic check, the (Di)GraphMatcher class should be
+subclassed, and the
+:meth:`semantic_feasibility`
+function should be redefined. By default, the semantic feasibility function
+always returns ``True``. The effect of this is that semantics are not
+considered in the matching of G1 and G2.
+
+Examples
+--------
+
+Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
+
+>>> from networkx.algorithms import isomorphism
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> GM = isomorphism.GraphMatcher(G1, G2)
+>>> GM.is_isomorphic()
+True
+
+GM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> GM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+Suppose G1 and G2 are isomorphic directed graphs.
+Verification is as follows:
+
+>>> G1 = nx.path_graph(4, create_using=nx.DiGraph())
+>>> G2 = nx.path_graph(4, create_using=nx.DiGraph())
+>>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+>>> DiGM.is_isomorphic()
+True
+
+DiGM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> DiGM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+
+Subgraph Isomorphism
+--------------------
+Graph theory literature can be ambiguous about the meaning of
+'subgraph isomorphism', and we seek to clarify it now.
+
+In the VF2 literature, a mapping `M` is said to be a graph-subgraph
+isomorphism iff `M` is an isomorphism between `G2` and a subgraph of `G1`.
+Thus, to say that `G1` and `G2` are graph-subgraph isomorphic is to say
+that a subgraph of `G1` is isomorphic to `G2`.
+
+Other literature uses the phrase 'subgraph isomorphic' as in '`G1` does
+not have a subgraph isomorphic to `G2`'. Another use is as an adverb
+modifying 'isomorphic'. Thus, to say that `G1` and `G2` are subgraph
+isomorphic is to say that a subgraph of `G1` is isomorphic to `G2`.
+
+Finally, the term 'subgraph' can have multiple meanings. In this
+context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
+subgraph isomorphisms are not directly supported, but one should be
+able to perform the check by making use of
+:func:`line_graph`. For
+subgraphs which are not induced, the term 'monomorphism' is preferred
+over 'isomorphism'.
+
+Let ``G = (N, E)`` be a graph with a set of nodes `N` and set of edges `E`.
+
+If ``G' = (N', E')`` is a subgraph, then:
+    `N'` is a subset of `N` and
+    `E'` is a subset of `E`.
+
+If ``G' = (N', E')`` is a node-induced subgraph, then:
+    `N'` is a subset of `N` and
+    `E'` is the subset of edges in `E` relating nodes in `N'`.
+
+If ``G' = (N', E')`` is an edge-induced subgraph, then:
+    `N'` is the subset of nodes in `N` related by edges in `E'` and
+    `E'` is a subset of `E`.
+
+If ``G' = (N', E')`` is a monomorphism, then:
+    `N'` is a subset of `N` and
+    `E'` is a subset of the set of edges in `E` relating nodes in `N'`.
+
+Note that if `G'` is a node-induced subgraph of `G`, then it is always a
+subgraph monomorphism of `G`, but the opposite is not always true, as a
+monomorphism can have fewer edges.
+
+References
+----------
+[1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
+    "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
+    IEEE Transactions on Pattern Analysis and Machine Intelligence,
+    vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
+    http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
+
+[2] L. P. Cordella, P. Foggia, C. Sansone, M.
Vento, "An Improved + Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop + on Graph-based Representations in Pattern Recognition, Cuen, + pp. 149-159, 2001. + https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs + +See Also +-------- +:meth:`semantic_feasibility ` +:meth:`syntactic_feasibility ` + +Notes +----- + +The implementation handles both directed and undirected graphs as well +as multigraphs. + +In general, the subgraph isomorphism problem is NP-complete whereas the +graph isomorphism problem is most likely not NP-complete (although no +polynomial-time algorithm is known to exist). + +""" + +# This work was originally coded by Christopher Ellison +# as part of the Computational Mechanics Python (CMPy) project. +# James P. Crutchfield, principal investigator. +# Complexity Sciences Center and Physics Department, UC Davis. + +import sys + +__all__ = ["GraphMatcher", "DiGraphMatcher"] + + +class GraphMatcher: + """Implementation of VF2 algorithm for matching undirected graphs. + + Suitable for Graph and MultiGraph instances. + """ + + def __init__(self, G1, G2): + """Initialize GraphMatcher. + + Parameters + ---------- + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism. + + Examples + -------- + To create a GraphMatcher which checks for syntactic feasibility: + + >>> from networkx.algorithms import isomorphism + >>> G1 = nx.path_graph(4) + >>> G2 = nx.path_graph(4) + >>> GM = isomorphism.GraphMatcher(G1, G2) + """ + self.G1 = G1 + self.G2 = G2 + self.G1_nodes = set(G1.nodes()) + self.G2_nodes = set(G2.nodes()) + self.G2_node_order = {n: i for i, n in enumerate(G2)} + + # Set recursion limit. + self.old_recursion_limit = sys.getrecursionlimit() + expected_max_recursion_level = len(self.G2) + if self.old_recursion_limit < 1.5 * expected_max_recursion_level: + # Give some breathing room. + sys.setrecursionlimit(int(1.5 * expected_max_recursion_level)) + + # Declare that we will be searching for a graph-graph isomorphism. + self.test = "graph" + + # Initialize state + self.initialize() + + def reset_recursion_limit(self): + """Restores the recursion limit.""" + # TODO: + # Currently, we use recursion and set the recursion level higher. + # It would be nice to restore the level, but because the + # (Di)GraphMatcher classes make use of cyclic references, garbage + # collection will never happen when we define __del__() to + # restore the recursion level. The result is a memory leak. + # So for now, we do not automatically restore the recursion level, + # and instead provide a method to do this manually. Eventually, + # we should turn this into a non-recursive implementation. + sys.setrecursionlimit(self.old_recursion_limit) + + def candidate_pairs_iter(self): + """Iterator over candidate pairs of nodes in G1 and G2.""" + + # All computations are done using the current state! + + G1_nodes = self.G1_nodes + G2_nodes = self.G2_nodes + min_key = self.G2_node_order.__getitem__ + + # First we compute the inout-terminal sets. + T1_inout = [node for node in self.inout_1 if node not in self.core_1] + T2_inout = [node for node in self.inout_2 if node not in self.core_2] + + # If T1_inout and T2_inout are both nonempty. + # P(s) = T1_inout x {min T2_inout} + if T1_inout and T2_inout: + node_2 = min(T2_inout, key=min_key) + for node_1 in T1_inout: + yield node_1, node_2 + + else: + # If T1_inout and T2_inout were both empty.... 
+ # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} + # if not (T1_inout or T2_inout): # as suggested by [2], incorrect + if 1: # as inferred from [1], correct + # First we determine the candidate node for G2 + other_node = min(G2_nodes - set(self.core_2), key=min_key) + for node in self.G1: + if node not in self.core_1: + yield node, other_node + + # For all other cases, we don't have any candidate pairs. + + def initialize(self): + """Reinitializes the state of the algorithm. + + This method should be redefined if using something other than GMState. + If only subclassing GraphMatcher, a redefinition is not necessary. + + """ + + # core_1[n] contains the index of the node paired with n, which is m, + # provided n is in the mapping. + # core_2[m] contains the index of the node paired with m, which is n, + # provided m is in the mapping. + self.core_1 = {} + self.core_2 = {} + + # See the paper for definitions of M_x and T_x^{y} + + # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout} + # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout} + # + # The value stored is the depth of the SSR tree when the node became + # part of the corresponding set. + self.inout_1 = {} + self.inout_2 = {} + # Practically, these sets simply store the nodes in the subgraph. + + self.state = GMState(self) + + # Provide a convenient way to access the isomorphism mapping. + self.mapping = self.core_1.copy() + + def is_isomorphic(self): + """Returns True if G1 and G2 are isomorphic graphs.""" + + # Let's do two very quick checks! + # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)? + # For now, I just copy the code. + + # Check global properties + if self.G1.order() != self.G2.order(): + return False + + # Check local properties + d1 = sorted(d for n, d in self.G1.degree()) + d2 = sorted(d for n, d in self.G2.degree()) + if d1 != d2: + return False + + try: + x = next(self.isomorphisms_iter()) + return True + except StopIteration: + return False + + def isomorphisms_iter(self): + """Generator over isomorphisms between G1 and G2.""" + # Declare that we are looking for a graph-graph isomorphism. + self.test = "graph" + self.initialize() + yield from self.match() + + def match(self): + """Extends the isomorphism mapping. + + This function is called recursively to determine if a complete + isomorphism can be found between G1 and G2. It cleans up the class + variables after each recursive call. If an isomorphism is found, + we yield the mapping. + + """ + if len(self.core_1) == len(self.G2): + # Save the final mapping, otherwise garbage collection deletes it. + self.mapping = self.core_1.copy() + # The mapping is complete. + yield self.mapping + else: + for G1_node, G2_node in self.candidate_pairs_iter(): + if self.syntactic_feasibility(G1_node, G2_node): + if self.semantic_feasibility(G1_node, G2_node): + # Recursive call, adding the feasible state. + newstate = self.state.__class__(self, G1_node, G2_node) + yield from self.match() + + # restore data structures + newstate.restore() + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if adding (G1_node, G2_node) is semantically feasible. + + The semantic feasibility function should return True if it is + acceptable to add the candidate pair (G1_node, G2_node) to the current + partial isomorphism mapping. The logic should focus on semantic + information contained in the edge data or a formalized node class. + + By acceptable, we mean that the subsequent mapping can still become a + complete isomorphism mapping. 
Thus, if adding the candidate pair
+        definitely makes it so that the subsequent mapping cannot become a
+        complete isomorphism mapping, then this function must return False.
+
+        The default semantic feasibility function always returns True. The
+        effect is that semantics are not considered in the matching of G1
+        and G2.
+
+        The semantic checks might differ based on what type of test is
+        being performed. A keyword description of the test is stored in
+        self.test. Here is a quick description of the currently implemented
+        tests::
+
+          test='graph'
+            Indicates that the graph matcher is looking for a graph-graph
+            isomorphism.
+
+          test='subgraph'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            isomorphism such that a subgraph of G1 is isomorphic to G2.
+
+          test='mono'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            monomorphism such that a subgraph of G1 is monomorphic to G2.
+
+        Any subclass which redefines semantic_feasibility() must maintain
+        the above form to keep the match() method functional. Implementations
+        should consider multigraphs.
+        """
+        return True
+
+    def subgraph_is_isomorphic(self):
+        """Returns True if a subgraph of G1 is isomorphic to G2."""
+        try:
+            x = next(self.subgraph_isomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def subgraph_is_monomorphic(self):
+        """Returns True if a subgraph of G1 is monomorphic to G2."""
+        try:
+            x = next(self.subgraph_monomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    #    subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of G1 and G2."""
+        # Declare that we are looking for graph-subgraph isomorphism.
+        self.test = "subgraph"
+        self.initialize()
+        yield from self.match()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of G1 and G2."""
+        # Declare that we are looking for graph-subgraph monomorphism.
+        self.test = "mono"
+        self.initialize()
+        yield from self.match()
+
+    #    subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes. This is not the case when
+        # dealing with MultiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_neighbor, we will
+        # make sure that the number of edges is checked. We also add
+        # an R_self check to verify that the number of selfloops is acceptable.
+        #
+        # Users might be comparing Graph instances with MultiGraph instances.
+        # So the generic GraphMatcher class must work with MultiGraphs.
+        # Care must be taken since the value in the innermost dictionary is a
+        # single attribute dictionary for Graph instances, while for
+        # MultiGraphs it is a dictionary of edge-key -> attribute-dict entries.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+        ###
+
+        # Look ahead 0
+
+        # R_self
+
+        # The number of selfloops for G1_node must equal the number of
+        # self-loops for G2_node.
Without this check, we would fail on + # R_neighbor at the next recursion level. But it is good to prune the + # search tree now. + + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): + return False + else: + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): + return False + + # R_neighbor + + # For each neighbor n' of n in the partial mapping, the corresponding + # node m' is a neighbor of m, and vice versa. Also, the number of + # edges must be equal. + if self.test != "mono": + for neighbor in self.G1[G1_node]: + if neighbor in self.core_1: + if self.core_1[neighbor] not in self.G2[G2_node]: + return False + elif self.G1.number_of_edges( + neighbor, G1_node + ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): + return False + + for neighbor in self.G2[G2_node]: + if neighbor in self.core_2: + if self.core_2[neighbor] not in self.G1[G1_node]: + return False + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) < self.G2.number_of_edges(neighbor, G2_node): + return False + else: + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) != self.G2.number_of_edges(neighbor, G2_node): + return False + + if self.test != "mono": + # Look ahead 1 + + # R_terminout + # The number of neighbors of n in T_1^{inout} is equal to the + # number of neighbors of m that are in T_2^{inout}, and vice versa. + num1 = 0 + for neighbor in self.G1[G1_node]: + if (neighbor in self.inout_1) and (neighbor not in self.core_1): + num1 += 1 + num2 = 0 + for neighbor in self.G2[G2_node]: + if (neighbor in self.inout_2) and (neighbor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Look ahead 2 + + # R_new + + # The number of neighbors of n that are neither in the core_1 nor + # T_1^{inout} is equal to the number of neighbors of m + # that are neither in core_2 nor T_2^{inout}. + num1 = 0 + for neighbor in self.G1[G1_node]: + if neighbor not in self.inout_1: + num1 += 1 + num2 = 0 + for neighbor in self.G2[G2_node]: + if neighbor not in self.inout_2: + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Otherwise, this node pair is syntactically feasible! + return True + + +class DiGraphMatcher(GraphMatcher): + """Implementation of VF2 algorithm for matching directed graphs. + + Suitable for DiGraph and MultiDiGraph instances. + """ + + def __init__(self, G1, G2): + """Initialize DiGraphMatcher. + + G1 and G2 should be nx.Graph or nx.MultiGraph instances. + + Examples + -------- + To create a GraphMatcher which checks for syntactic feasibility: + + >>> from networkx.algorithms import isomorphism + >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) + >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) + >>> DiGM = isomorphism.DiGraphMatcher(G1, G2) + """ + super().__init__(G1, G2) + + def candidate_pairs_iter(self): + """Iterator over candidate pairs of nodes in G1 and G2.""" + + # All computations are done using the current state! + + G1_nodes = self.G1_nodes + G2_nodes = self.G2_nodes + min_key = self.G2_node_order.__getitem__ + + # First we compute the out-terminal sets. 
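+        # T1_out/T2_out (T_x^{out} in [1]) hold the nodes that are targets
+        # of an edge from a mapped node but are not yet mapped themselves.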
+        T1_out = [node for node in self.out_1 if node not in self.core_1]
+        T2_out = [node for node in self.out_2 if node not in self.core_2]
+
+        # If T1_out and T2_out are both nonempty.
+        # P(s) = T1_out x {min T2_out}
+        if T1_out and T2_out:
+            node_2 = min(T2_out, key=min_key)
+            for node_1 in T1_out:
+                yield node_1, node_2
+
+        # If T1_out and T2_out were not both nonempty....
+        # We compute the in-terminal sets.
+
+        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
+        else:  # as suggested by [1], correct
+            T1_in = [node for node in self.in_1 if node not in self.core_1]
+            T2_in = [node for node in self.in_2 if node not in self.core_2]
+
+            # If T1_in and T2_in are both nonempty.
+            # P(s) = T1_in x {min T2_in}
+            if T1_in and T2_in:
+                node_2 = min(T2_in, key=min_key)
+                for node_1 in T1_in:
+                    yield node_1, node_2
+
+            # If all terminal sets are empty...
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+
+            # elif not (T1_in or T2_in):   # as suggested by [2], incorrect
+            else:  # as inferred from [1], correct
+                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
+                for node_1 in G1_nodes:
+                    if node_1 not in self.core_1:
+                        yield node_1, node_2
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than DiGMState.
+        If only subclassing DiGraphMatcher, a redefinition is not necessary.
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        #           provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        #           provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
+        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
+        #
+        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
+        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
+        #
+        # The value stored is the depth of the search tree when the node became
+        # part of the corresponding set.
+        self.in_1 = {}
+        self.in_2 = {}
+        self.out_1 = {}
+        self.out_2 = {}
+
+        self.state = DiGMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes. This is not the case when
+        # dealing with MultiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_pred and R_succ, we
+        # will make sure that the number of edges is checked. We also add
+        # an R_self check to verify that the number of selfloops is acceptable.
+
+        # Users might be comparing DiGraph instances with MultiDiGraph
+        # instances. So the generic DiGraphMatcher class must work with
+        # MultiDiGraphs. Care must be taken since the value in the innermost
+        # dictionary is a single attribute dictionary for DiGraph instances,
+        # while for MultiDiGraphs it is a dictionary of edge-key ->
+        # attribute-dict entries.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+ ### + + # Look ahead 0 + + # R_self + + # The number of selfloops for G1_node must equal the number of + # self-loops for G2_node. Without this check, we would fail on R_pred + # at the next recursion level. This should prune the tree even further. + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): + return False + else: + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): + return False + + # R_pred + + # For each predecessor n' of n in the partial mapping, the + # corresponding node m' is a predecessor of m, and vice versa. Also, + # the number of edges must be equal + if self.test != "mono": + for predecessor in self.G1.pred[G1_node]: + if predecessor in self.core_1: + if self.core_1[predecessor] not in self.G2.pred[G2_node]: + return False + elif self.G1.number_of_edges( + predecessor, G1_node + ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): + return False + + for predecessor in self.G2.pred[G2_node]: + if predecessor in self.core_2: + if self.core_2[predecessor] not in self.G1.pred[G1_node]: + return False + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) < self.G2.number_of_edges(predecessor, G2_node): + return False + else: + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) != self.G2.number_of_edges(predecessor, G2_node): + return False + + # R_succ + + # For each successor n' of n in the partial mapping, the corresponding + # node m' is a successor of m, and vice versa. Also, the number of + # edges must be equal. + if self.test != "mono": + for successor in self.G1[G1_node]: + if successor in self.core_1: + if self.core_1[successor] not in self.G2[G2_node]: + return False + elif self.G1.number_of_edges( + G1_node, successor + ) != self.G2.number_of_edges(G2_node, self.core_1[successor]): + return False + + for successor in self.G2[G2_node]: + if successor in self.core_2: + if self.core_2[successor] not in self.G1[G1_node]: + return False + elif self.test == "mono": + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) < self.G2.number_of_edges(G2_node, successor): + return False + else: + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) != self.G2.number_of_edges(G2_node, successor): + return False + + if self.test != "mono": + # Look ahead 1 + + # R_termin + # The number of predecessors of n that are in T_1^{in} is equal to the + # number of predecessors of m that are in T_2^{in}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.in_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.in_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are in T_1^{in} is equal to the + # number of successors of m that are in T_2^{in}. 
+ num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.in_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.in_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # R_termout + + # The number of predecessors of n that are in T_1^{out} is equal to the + # number of predecessors of m that are in T_2^{out}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.out_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.out_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are in T_1^{out} is equal to the + # number of successors of m that are in T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.out_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.out_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Look ahead 2 + + # R_new + + # The number of predecessors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor not in self.in_1) and (predecessor not in self.out_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor not in self.in_2) and (predecessor not in self.out_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of successors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor not in self.in_1) and (successor not in self.out_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor not in self.in_2) and (successor not in self.out_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Otherwise, this node pair is syntactically feasible! + return True + + +class GMState: + """Internal representation of state for the GraphMatcher class. + + This class is used internally by the GraphMatcher class. It is used + only to store state specific data. There will be at most G2.order() of + these objects in memory at a time, due to the depth-first search + strategy employed by the VF2 algorithm. + """ + + def __init__(self, GM, G1_node=None, G2_node=None): + """Initializes GMState object. + + Pass in the GraphMatcher to which this GMState belongs and the + new node pair that will be added to the GraphMatcher's current + isomorphism mapping. + """ + self.GM = GM + + # Initialize the last stored node pair. 
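+        # (Both stay None for the root state, which resets the shared
+        # vectors in the branch below.)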
+ self.G1_node = None + self.G2_node = None + self.depth = len(GM.core_1) + + if G1_node is None or G2_node is None: + # Then we reset the class variables + GM.core_1 = {} + GM.core_2 = {} + GM.inout_1 = {} + GM.inout_2 = {} + + # Watch out! G1_node == 0 should evaluate to True. + if G1_node is not None and G2_node is not None: + # Add the node pair to the isomorphism mapping. + GM.core_1[G1_node] = G2_node + GM.core_2[G2_node] = G1_node + + # Store the node that was added last. + self.G1_node = G1_node + self.G2_node = G2_node + + # Now we must update the other two vectors. + # We will add only if it is not in there already! + self.depth = len(GM.core_1) + + # First we add the new nodes... + if G1_node not in GM.inout_1: + GM.inout_1[G1_node] = self.depth + if G2_node not in GM.inout_2: + GM.inout_2[G2_node] = self.depth + + # Now we add every other node... + + # Updates for T_1^{inout} + new_nodes = set() + for node in GM.core_1: + new_nodes.update( + [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1] + ) + for node in new_nodes: + if node not in GM.inout_1: + GM.inout_1[node] = self.depth + + # Updates for T_2^{inout} + new_nodes = set() + for node in GM.core_2: + new_nodes.update( + [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2] + ) + for node in new_nodes: + if node not in GM.inout_2: + GM.inout_2[node] = self.depth + + def restore(self): + """Deletes the GMState object and restores the class variables.""" + # First we remove the node that was added from the core vectors. + # Watch out! G1_node == 0 should evaluate to True. + if self.G1_node is not None and self.G2_node is not None: + del self.GM.core_1[self.G1_node] + del self.GM.core_2[self.G2_node] + + # Now we revert the other two vectors. + # Thus, we delete all entries which have this depth level. + for vector in (self.GM.inout_1, self.GM.inout_2): + for node in list(vector.keys()): + if vector[node] == self.depth: + del vector[node] + + +class DiGMState: + """Internal representation of state for the DiGraphMatcher class. + + This class is used internally by the DiGraphMatcher class. It is used + only to store state specific data. There will be at most G2.order() of + these objects in memory at a time, due to the depth-first search + strategy employed by the VF2 algorithm. + + """ + + def __init__(self, GM, G1_node=None, G2_node=None): + """Initializes DiGMState object. + + Pass in the DiGraphMatcher to which this DiGMState belongs and the + new node pair that will be added to the GraphMatcher's current + isomorphism mapping. + """ + self.GM = GM + + # Initialize the last stored node pair. + self.G1_node = None + self.G2_node = None + self.depth = len(GM.core_1) + + if G1_node is None or G2_node is None: + # Then we reset the class variables + GM.core_1 = {} + GM.core_2 = {} + GM.in_1 = {} + GM.in_2 = {} + GM.out_1 = {} + GM.out_2 = {} + + # Watch out! G1_node == 0 should evaluate to True. + if G1_node is not None and G2_node is not None: + # Add the node pair to the isomorphism mapping. + GM.core_1[G1_node] = G2_node + GM.core_2[G2_node] = G1_node + + # Store the node that was added last. + self.G1_node = G1_node + self.G2_node = G2_node + + # Now we must update the other four vectors. + # We will add only if it is not in there already! + self.depth = len(GM.core_1) + + # First we add the new nodes... 
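+            # The newly mapped pair belongs to M_1/M_2, so it is recorded
+            # in all four vectors at the current search depth.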
+ for vector in (GM.in_1, GM.out_1): + if G1_node not in vector: + vector[G1_node] = self.depth + for vector in (GM.in_2, GM.out_2): + if G2_node not in vector: + vector[G2_node] = self.depth + + # Now we add every other node... + + # Updates for T_1^{in} + new_nodes = set() + for node in GM.core_1: + new_nodes.update( + [ + predecessor + for predecessor in GM.G1.predecessors(node) + if predecessor not in GM.core_1 + ] + ) + for node in new_nodes: + if node not in GM.in_1: + GM.in_1[node] = self.depth + + # Updates for T_2^{in} + new_nodes = set() + for node in GM.core_2: + new_nodes.update( + [ + predecessor + for predecessor in GM.G2.predecessors(node) + if predecessor not in GM.core_2 + ] + ) + for node in new_nodes: + if node not in GM.in_2: + GM.in_2[node] = self.depth + + # Updates for T_1^{out} + new_nodes = set() + for node in GM.core_1: + new_nodes.update( + [ + successor + for successor in GM.G1.successors(node) + if successor not in GM.core_1 + ] + ) + for node in new_nodes: + if node not in GM.out_1: + GM.out_1[node] = self.depth + + # Updates for T_2^{out} + new_nodes = set() + for node in GM.core_2: + new_nodes.update( + [ + successor + for successor in GM.G2.successors(node) + if successor not in GM.core_2 + ] + ) + for node in new_nodes: + if node not in GM.out_2: + GM.out_2[node] = self.depth + + def restore(self): + """Deletes the DiGMState object and restores the class variables.""" + + # First we remove the node that was added from the core vectors. + # Watch out! G1_node == 0 should evaluate to True. + if self.G1_node is not None and self.G2_node is not None: + del self.GM.core_1[self.G1_node] + del self.GM.core_2[self.G2_node] + + # Now we revert the other four vectors. + # Thus, we delete all entries which have this depth level. + for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2): + for node in list(vector.keys()): + if vector[node] == self.depth: + del vector[node] diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py new file mode 100644 index 0000000000000000000000000000000000000000..8185f34ebde5b4b142fb2d98f7e8c7b8528b905c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py @@ -0,0 +1,351 @@ +"""Functions which help end users define customize node_match and +edge_match functions to use during isomorphism checks. +""" +import math +import types +from itertools import permutations + +__all__ = [ + "categorical_node_match", + "categorical_edge_match", + "categorical_multiedge_match", + "numerical_node_match", + "numerical_edge_match", + "numerical_multiedge_match", + "generic_node_match", + "generic_edge_match", + "generic_multiedge_match", +] + + +def copyfunc(f, name=None): + """Returns a deepcopy of a function.""" + return types.FunctionType( + f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__ + ) + + +def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08): + """Returns True if x and y are sufficiently close, elementwise. + + Parameters + ---------- + rtol : float + The relative error tolerance. + atol : float + The absolute error tolerance. + + """ + # assume finite weights, see numpy.allclose() for reference + return all(math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y)) + + +categorical_doc = """ +Returns a comparison function for a categorical node attribute. 
+ +The value(s) of the attr(s) must be hashable and comparable via the == +operator since they are placed into a set([]) object. If the sets from +G1 and G2 are the same, then the constructed function returns True. + +Parameters +---------- +attr : string | list + The categorical node attribute to compare, or a list of categorical + node attributes to compare. +default : value | list + The default value for the categorical node attribute, or a list of + default values for the categorical node attributes. + +Returns +------- +match : function + The customized, categorical `node_match` function. + +Examples +-------- +>>> import networkx.algorithms.isomorphism as iso +>>> nm = iso.categorical_node_match("size", 1) +>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2]) + +""" + + +def categorical_node_match(attr, default): + if isinstance(attr, str): + + def match(data1, data2): + return data1.get(attr, default) == data2.get(attr, default) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(data1, data2): + return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs) + + return match + + +categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match") + + +def categorical_multiedge_match(attr, default): + if isinstance(attr, str): + + def match(datasets1, datasets2): + values1 = {data.get(attr, default) for data in datasets1.values()} + values2 = {data.get(attr, default) for data in datasets2.values()} + return values1 == values2 + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(datasets1, datasets2): + values1 = set() + for data1 in datasets1.values(): + x = tuple(data1.get(attr, d) for attr, d in attrs) + values1.add(x) + values2 = set() + for data2 in datasets2.values(): + x = tuple(data2.get(attr, d) for attr, d in attrs) + values2.add(x) + return values1 == values2 + + return match + + +# Docstrings for categorical functions. +categorical_node_match.__doc__ = categorical_doc +categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge") +tmpdoc = categorical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match") +categorical_multiedge_match.__doc__ = tmpdoc + + +numerical_doc = """ +Returns a comparison function for a numerical node attribute. + +The value(s) of the attr(s) must be numerical and sortable. If the +sorted list of values from G1 and G2 are the same within some +tolerance, then the constructed function returns True. + +Parameters +---------- +attr : string | list + The numerical node attribute to compare, or a list of numerical + node attributes to compare. +default : value | list + The default value for the numerical node attribute, or a list of + default values for the numerical node attributes. +rtol : float + The relative error tolerance. +atol : float + The absolute error tolerance. + +Returns +------- +match : function + The customized, numerical `node_match` function. 
+ +Examples +-------- +>>> import networkx.algorithms.isomorphism as iso +>>> nm = iso.numerical_node_match("weight", 1.0) +>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5]) + +""" + + +def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): + if isinstance(attr, str): + + def match(data1, data2): + return math.isclose( + data1.get(attr, default), + data2.get(attr, default), + rel_tol=rtol, + abs_tol=atol, + ) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(data1, data2): + values1 = [data1.get(attr, d) for attr, d in attrs] + values2 = [data2.get(attr, d) for attr, d in attrs] + return allclose(values1, values2, rtol=rtol, atol=atol) + + return match + + +numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match") + + +def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): + if isinstance(attr, str): + + def match(datasets1, datasets2): + values1 = sorted(data.get(attr, default) for data in datasets1.values()) + values2 = sorted(data.get(attr, default) for data in datasets2.values()) + return allclose(values1, values2, rtol=rtol, atol=atol) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(datasets1, datasets2): + values1 = [] + for data1 in datasets1.values(): + x = tuple(data1.get(attr, d) for attr, d in attrs) + values1.append(x) + values2 = [] + for data2 in datasets2.values(): + x = tuple(data2.get(attr, d) for attr, d in attrs) + values2.append(x) + values1.sort() + values2.sort() + for xi, yi in zip(values1, values2): + if not allclose(xi, yi, rtol=rtol, atol=atol): + return False + else: + return True + + return match + + +# Docstrings for numerical functions. +numerical_node_match.__doc__ = numerical_doc +numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge") +tmpdoc = numerical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match") +numerical_multiedge_match.__doc__ = tmpdoc + + +generic_doc = """ +Returns a comparison function for a generic attribute. + +The value(s) of the attr(s) are compared using the specified +operators. If all the attributes are equal, then the constructed +function returns True. + +Parameters +---------- +attr : string | list + The node attribute to compare, or a list of node attributes + to compare. +default : value | list + The default value for the node attribute, or a list of + default values for the node attributes. +op : callable | list + The operator to use when comparing attribute values, or a list + of operators to use when comparing values for each attribute. + +Returns +------- +match : function + The customized, generic `node_match` function. 
+ +Examples +-------- +>>> from operator import eq +>>> from math import isclose +>>> from networkx.algorithms.isomorphism import generic_node_match +>>> nm = generic_node_match("weight", 1.0, isclose) +>>> nm = generic_node_match("color", "red", eq) +>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq]) + +""" + + +def generic_node_match(attr, default, op): + if isinstance(attr, str): + + def match(data1, data2): + return op(data1.get(attr, default), data2.get(attr, default)) + + else: + attrs = list(zip(attr, default, op)) # Python 3 + + def match(data1, data2): + for attr, d, operator in attrs: + if not operator(data1.get(attr, d), data2.get(attr, d)): + return False + else: + return True + + return match + + +generic_edge_match = copyfunc(generic_node_match, "generic_edge_match") + + +def generic_multiedge_match(attr, default, op): + """Returns a comparison function for a generic attribute. + + The value(s) of the attr(s) are compared using the specified + operators. If all the attributes are equal, then the constructed + function returns True. Potentially, the constructed edge_match + function can be slow since it must verify that no isomorphism + exists between the multiedges before it returns False. + + Parameters + ---------- + attr : string | list + The edge attribute to compare, or a list of node attributes + to compare. + default : value | list + The default value for the edge attribute, or a list of + default values for the edgeattributes. + op : callable | list + The operator to use when comparing attribute values, or a list + of operators to use when comparing values for each attribute. + + Returns + ------- + match : function + The customized, generic `edge_match` function. + + Examples + -------- + >>> from operator import eq + >>> from math import isclose + >>> from networkx.algorithms.isomorphism import generic_node_match + >>> nm = generic_node_match("weight", 1.0, isclose) + >>> nm = generic_node_match("color", "red", eq) + >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq]) + + """ + + # This is slow, but generic. + # We must test every possible isomorphism between the edges. + if isinstance(attr, str): + attr = [attr] + default = [default] + op = [op] + attrs = list(zip(attr, default)) # Python 3 + + def match(datasets1, datasets2): + values1 = [] + for data1 in datasets1.values(): + x = tuple(data1.get(attr, d) for attr, d in attrs) + values1.append(x) + values2 = [] + for data2 in datasets2.values(): + x = tuple(data2.get(attr, d) for attr, d in attrs) + values2.append(x) + for vals2 in permutations(values2): + for xi, yi in zip(values1, vals2): + if not all(map(lambda x, y, z: z(x, y), xi, yi, op)): + # This is not an isomorphism, go to next permutation. + break + else: + # Then we found an isomorphism. + return True + else: + # Then there are no isomorphisms between the multiedges. + return False + + return match + + +# Docstrings for numerical functions. 
+generic_node_match.__doc__ = generic_doc +generic_edge_match.__doc__ = generic_doc.replace("node", "edge") diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py new file mode 100644 index 0000000000000000000000000000000000000000..b78ecf149f5a7e741f6c49d479c09194c0ce2107 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py @@ -0,0 +1,304 @@ +""" +***************************** +Time-respecting VF2 Algorithm +***************************** + +An extension of the VF2 algorithm for time-respecting graph isomorphism +testing in temporal graphs. + +A temporal graph is one in which edges contain a datetime attribute, +denoting when interaction occurred between the incident nodes. A +time-respecting subgraph of a temporal graph is a subgraph such that +all interactions incident to a node occurred within a time threshold, +delta, of each other. A directed time-respecting subgraph has the +added constraint that incoming interactions to a node must precede +outgoing interactions from the same node - this enforces a sense of +directed flow. + +Introduction +------------ + +The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher +extend the GraphMatcher and DiGraphMatcher classes, respectively, +to include temporal constraints on matches. This is achieved through +a semantic check, via the semantic_feasibility() function. + +As well as including G1 (the graph in which to seek embeddings) and +G2 (the subgraph structure of interest), the name of the temporal +attribute on the edges and the time threshold, delta, must be supplied +as arguments to the matching constructors. + +A delta of zero is the strictest temporal constraint on the match - +only embeddings in which all interactions occur at the same time will +be returned. A delta of one day will allow embeddings in which +adjacent interactions occur up to a day apart. + +Examples +-------- + +Examples will be provided when the datetime type has been incorporated. + + +Temporal Subgraph Isomorphism +----------------------------- + +A brief discussion of the somewhat diverse current literature will be +included here. + +References +---------- + +[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In: +The 2013 IEEE/ACM International Conference on Advances in Social +Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013: +pages 1451 - 1452. [65] + +For a discussion of the literature on temporal networks: + +[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports, +519(3):97–125, 2012. + +Notes +----- + +Handles directed and undirected graphs and graphs with parallel edges. + +""" + +import networkx as nx + +from .isomorphvf2 import DiGraphMatcher, GraphMatcher + +__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"] + + +class TimeRespectingGraphMatcher(GraphMatcher): + def __init__(self, G1, G2, temporal_attribute_name, delta): + """Initialize TimeRespectingGraphMatcher. + + G1 and G2 should be nx.Graph or nx.MultiGraph instances. 
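+
+        Every edge is expected to carry ``temporal_attribute_name``, with
+        values that support comparison and subtraction against ``delta``
+        (e.g. ``datetime.datetime`` values with a ``timedelta`` delta).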
+ + Examples + -------- + To create a TimeRespectingGraphMatcher which checks for + syntactic and semantic feasibility: + + >>> from networkx.algorithms import isomorphism + >>> from datetime import timedelta + >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph())) + + >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph())) + + >>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, "date", timedelta(days=1)) + """ + self.temporal_attribute_name = temporal_attribute_name + self.delta = delta + super().__init__(G1, G2) + + def one_hop(self, Gx, Gx_node, neighbors): + """ + Edges one hop out from a node in the mapping should be + time-respecting with respect to each other. + """ + dates = [] + for n in neighbors: + if isinstance(Gx, nx.Graph): # Graph G[u][v] returns the data dictionary. + dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) + else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edges between node pair. + dates.append(edge[self.temporal_attribute_name]) + if any(x is None for x in dates): + raise ValueError("Datetime not supplied for at least one edge.") + return not dates or max(dates) - min(dates) <= self.delta + + def two_hop(self, Gx, core_x, Gx_node, neighbors): + """ + Paths of length 2 from Gx_node should be time-respecting. + """ + return all( + self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) + for v in neighbors + ) + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if adding (G1_node, G2_node) is semantically + feasible. + + Any subclass which redefines semantic_feasibility() must + maintain the self.tests if needed, to keep the match() method + functional. Implementations should consider multigraphs. + """ + neighbors = [n for n in self.G1[G1_node] if n in self.core_1] + if not self.one_hop(self.G1, G1_node, neighbors): # Fail fast on first node. + return False + if not self.two_hop(self.G1, self.core_1, G1_node, neighbors): + return False + # Otherwise, this node is semantically feasible! + return True + + +class TimeRespectingDiGraphMatcher(DiGraphMatcher): + def __init__(self, G1, G2, temporal_attribute_name, delta): + """Initialize TimeRespectingDiGraphMatcher. + + G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances. + + Examples + -------- + To create a TimeRespectingDiGraphMatcher which checks for + syntactic and semantic feasibility: + + >>> from networkx.algorithms import isomorphism + >>> from datetime import timedelta + >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) + + >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) + + >>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, "date", timedelta(days=1)) + """ + self.temporal_attribute_name = temporal_attribute_name + self.delta = delta + super().__init__(G1, G2) + + def get_pred_dates(self, Gx, Gx_node, core_x, pred): + """ + Get the dates of edges from predecessors. + """ + pred_dates = [] + if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary. + for n in pred: + pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name]) + else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. + for n in pred: + for edge in Gx[n][ + Gx_node + ].values(): # Iterates all edge data between node pair. + pred_dates.append(edge[self.temporal_attribute_name]) + return pred_dates + + def get_succ_dates(self, Gx, Gx_node, core_x, succ): + """ + Get the dates of edges to successors. 
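+
+        For MultiDiGraphs, every parallel edge from Gx_node to a successor
+        contributes one date.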
+ """ + succ_dates = [] + if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary. + for n in succ: + succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) + else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. + for n in succ: + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edge data between node pair. + succ_dates.append(edge[self.temporal_attribute_name]) + return succ_dates + + def one_hop(self, Gx, Gx_node, core_x, pred, succ): + """ + The ego node. + """ + pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred) + succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ) + return self.test_one(pred_dates, succ_dates) and self.test_two( + pred_dates, succ_dates + ) + + def two_hop_pred(self, Gx, Gx_node, core_x, pred): + """ + The predecessors of the ego node. + """ + return all( + self.one_hop( + Gx, + p, + core_x, + self.preds(Gx, core_x, p), + self.succs(Gx, core_x, p, Gx_node), + ) + for p in pred + ) + + def two_hop_succ(self, Gx, Gx_node, core_x, succ): + """ + The successors of the ego node. + """ + return all( + self.one_hop( + Gx, + s, + core_x, + self.preds(Gx, core_x, s, Gx_node), + self.succs(Gx, core_x, s), + ) + for s in succ + ) + + def preds(self, Gx, core_x, v, Gx_node=None): + pred = [n for n in Gx.predecessors(v) if n in core_x] + if Gx_node: + pred.append(Gx_node) + return pred + + def succs(self, Gx, core_x, v, Gx_node=None): + succ = [n for n in Gx.successors(v) if n in core_x] + if Gx_node: + succ.append(Gx_node) + return succ + + def test_one(self, pred_dates, succ_dates): + """ + Edges one hop out from Gx_node in the mapping should be + time-respecting with respect to each other, regardless of + direction. + """ + time_respecting = True + dates = pred_dates + succ_dates + + if any(x is None for x in dates): + raise ValueError("Date or datetime not supplied for at least one edge.") + + dates.sort() # Small to large. + if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta): + time_respecting = False + return time_respecting + + def test_two(self, pred_dates, succ_dates): + """ + Edges from a dual Gx_node in the mapping should be ordered in + a time-respecting manner. + """ + time_respecting = True + pred_dates.sort() + succ_dates.sort() + # First out before last in; negative of the necessary condition for time-respect. + if ( + 0 < len(succ_dates) + and 0 < len(pred_dates) + and succ_dates[0] < pred_dates[-1] + ): + time_respecting = False + return time_respecting + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if adding (G1_node, G2_node) is semantically + feasible. + + Any subclass which redefines semantic_feasibility() must + maintain the self.tests if needed, to keep the match() method + functional. Implementations should consider multigraphs. + """ + pred, succ = ( + [n for n in self.G1.predecessors(G1_node) if n in self.core_1], + [n for n in self.G1.successors(G1_node) if n in self.core_1], + ) + if not self.one_hop( + self.G1, G1_node, self.core_1, pred, succ + ): # Fail fast on first node. + return False + if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred): + return False + if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ): + return False + # Otherwise, this node is semantically feasible! 
+ return True diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ce50b05f3d2f3e99e7f42a37524f09b2e680c3f Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b3f54ee92af123470de5a10ae38d5192d5da527 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e526adff1a0956341111e9cdf6c5f9074dffcc0e Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a13b27df84d1587025bb1b4abad90d8f81bca40 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78f5f11cb808ee45792cfb3bbcc96743dcb402bd Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0262739eb5b13ff9471447f9506d3cf39ab9e510 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46115abeb1c5d55f1a284d720b7d7be9338fce28 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf07a37bfb5e8e2f4ede1f2ee57ab957642cf1e3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6cd463d4dee009aa43d162079292596da91f4134 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89ca65979662cf255a6cddf41a42d58cd17ed791 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 new file mode 100644 index 0000000000000000000000000000000000000000..dac54f0038c70e2d359ffa68de3c7641b46db21a Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 new file mode 100644 index 0000000000000000000000000000000000000000..6c6af680031b4f30fc6da946d0344b5c27e5f05e Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 new file mode 100644 index 0000000000000000000000000000000000000000..60c3a3ce1bdb54a61ead043f0adaf24b1fe24e93 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 new file mode 100644 index 0000000000000000000000000000000000000000..0236872094d4c73b0bb132165ce3ec4d1054f5f5 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py new file mode 100644 index 0000000000000000000000000000000000000000..00641519978edd6676396dafcb1d1f33b5490b82 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py @@ -0,0 +1,327 @@ +""" + Tests for ISMAGS isomorphism algorithm. +""" + +import pytest + +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +def _matches_to_sets(matches): + """ + Helper function to facilitate comparing collections of dictionaries in + which order does not matter. + """ + return {frozenset(m.items()) for m in matches} + + +class TestSelfIsomorphism: + data = [ + ( + [ + (0, {"name": "a"}), + (1, {"name": "a"}), + (2, {"name": "b"}), + (3, {"name": "b"}), + (4, {"name": "a"}), + (5, {"name": "a"}), + ], + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], + ), + (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]), + ( + [], + [ + (0, 1), + (1, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 0), + (0, 6), + (6, 7), + (2, 8), + (8, 9), + (4, 10), + (10, 11), + ], + ), + ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]), + ] + + def test_self_isomorphism(self): + """ + For some small, symmetric graphs, make sure that 1) they are isomorphic + to themselves, and 2) that only the identity mapping is found. + """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + def test_edgecase_self_isomorphism(self): + """ + This edgecase is one of the cases in which it is hard to find all + symmetry elements. + """ + graph = nx.Graph() + nx.add_path(graph, range(5)) + graph.add_edges_from([(2, 5), (5, 6)]) + + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5}) + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + def test_directed_self_isomorphism(self): + """ + For some small, directed, symmetric graphs, make sure that 1) they are + isomorphic to themselves, and 2) that only the identity mapping is + found. 
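+        (The graphs below are constructed with ``nx.Graph``, so despite
+        the name this currently exercises the undirected code path.)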
+ """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + +class TestSubgraphIsomorphism: + def test_isomorphism(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(4)) + + g2 = nx.Graph() + nx.add_cycle(g2, range(4)) + g2.add_edges_from(list(zip(g2, range(4, 8)))) + ismags = iso.ISMAGS(g2, g1) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in g1.nodes} + ] + + def test_isomorphism2(self): + g1 = nx.Graph() + nx.add_path(g1, range(3)) + + g2 = g1.copy() + g2.add_edge(1, 3) + + ismags = iso.ISMAGS(g2, g1) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [ + {0: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 3: 2}, + {2: 0, 1: 1, 3: 2}, + ] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [ + {0: 2, 1: 1, 2: 0}, + {0: 2, 1: 1, 3: 0}, + {2: 2, 1: 1, 3: 0}, + ] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_nodes(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.nodes[1]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{0: 2, 1: 1, 2: 0}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_edges(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.edges[1, 2]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{1: 2, 0: 0, 2: 1}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + +class TestWikipediaExample: + # Nodes 'a', 'b', 'c' and 'd' form a column. + # Nodes 'g', 'h', 'i' and 'j' form a column. + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] + + # Nodes 1,2,3,4 form the clockwise corners of a large square. 
+ # Nodes 5,6,7,8 form the clockwise corners of a small square + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] + + def test_graph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + gm = iso.ISMAGS(g1, g2) + assert gm.is_isomorphic() + + +class TestLargestCommonSubgraph: + def test_mcis(self): + # Example graphs from DOI: 10.1002/spe.588 + graph1 = nx.Graph() + graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)]) + graph1.nodes[1]["color"] = 0 + + graph2 = nx.Graph() + graph2.add_edges_from( + [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)] + ) + graph2.nodes[1]["color"] = 1 + graph2.nodes[6]["color"] = 2 + graph2.nodes[7]["color"] = 2 + + ismags = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}] + ) + assert expected == found_mcis + + ismags = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + # Same answer, but reversed. + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}] + ) + assert expected == found_mcis + + def test_symmetry_mcis(self): + graph1 = nx.Graph() + nx.add_path(graph1, range(4)) + + graph2 = nx.Graph() + nx.add_path(graph2, range(3)) + graph2.add_edge(1, 3) + + # Only the symmetry of graph2 is taken into account here. + ismags1 = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags1.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags1.largest_common_subgraph()) + expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}]) + assert expected == found_mcis + + # Only the symmetry of graph1 is taken into account here. 
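+        # (ISMAGS computes its symmetry-breaking constraints from the
+        # second, "pattern" argument, which is why swapping the argument
+        # order changes whose symmetry is factored out.)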
+ ismags2 = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags2.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags2.largest_common_subgraph()) + expected = _matches_to_sets( + [ + {3: 2, 0: 0, 1: 1}, + {2: 0, 0: 2, 1: 1}, + {3: 0, 0: 2, 1: 1}, + {3: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + {2: 0, 3: 2, 1: 1}, + ] + ) + + assert expected == found_mcis + + found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False)) + found_mcis2 = ismags2.largest_common_subgraph(False) + found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2] + found_mcis2 = _matches_to_sets(found_mcis2) + + expected = _matches_to_sets( + [ + {3: 2, 1: 3, 2: 1}, + {2: 0, 0: 2, 1: 1}, + {1: 2, 3: 3, 2: 1}, + {3: 0, 1: 3, 2: 1}, + {0: 2, 2: 3, 1: 1}, + {3: 0, 1: 2, 2: 1}, + {2: 0, 0: 3, 1: 1}, + {0: 0, 2: 3, 1: 1}, + {1: 0, 3: 3, 2: 1}, + {1: 0, 3: 2, 2: 1}, + {0: 3, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + ] + ) + assert expected == found_mcis1 + assert expected == found_mcis2 diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py new file mode 100644 index 0000000000000000000000000000000000000000..548af808ffd93a32dfe91b7b372b6c3e45a206bf --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py @@ -0,0 +1,48 @@ +import pytest + +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +class TestIsomorph: + @classmethod + def setup_class(cls): + cls.G1 = nx.Graph() + cls.G2 = nx.Graph() + cls.G3 = nx.Graph() + cls.G4 = nx.Graph() + cls.G5 = nx.Graph() + cls.G6 = nx.Graph() + cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]]) + cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]]) + cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]]) + cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]]) + cls.G5.add_edges_from([[1, 2], [1, 3]]) + cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]]) + + def test_could_be_isomorphic(self): + assert iso.could_be_isomorphic(self.G1, self.G2) + assert iso.could_be_isomorphic(self.G1, self.G3) + assert not iso.could_be_isomorphic(self.G1, self.G4) + assert iso.could_be_isomorphic(self.G3, self.G2) + assert not iso.could_be_isomorphic(self.G1, self.G6) + + def test_fast_could_be_isomorphic(self): + assert iso.fast_could_be_isomorphic(self.G3, self.G2) + assert not iso.fast_could_be_isomorphic(self.G3, self.G5) + assert not iso.fast_could_be_isomorphic(self.G1, self.G6) + + def test_faster_could_be_isomorphic(self): + assert iso.faster_could_be_isomorphic(self.G3, self.G2) + assert not iso.faster_could_be_isomorphic(self.G3, self.G5) + assert not iso.faster_could_be_isomorphic(self.G1, self.G6) + + def test_is_isomorphic(self): + assert iso.is_isomorphic(self.G1, self.G2) + assert not iso.is_isomorphic(self.G1, self.G4) + assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed()) + assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed()) + with pytest.raises( + nx.NetworkXError, match="Graphs G1 and G2 are not of the same type." 
+ ): + iso.is_isomorphic(self.G1.to_directed(), self.G1) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py new file mode 100644 index 0000000000000000000000000000000000000000..f658bcae4e3d0fd1945f67fd7e809933d97d4ee9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py @@ -0,0 +1,410 @@ +""" + Tests for VF2 isomorphism algorithm. +""" + +import importlib.resources +import os +import random +import struct + +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +class TestWikipediaExample: + # Source: https://en.wikipedia.org/wiki/Graph_isomorphism + + # Nodes 'a', 'b', 'c' and 'd' form a column. + # Nodes 'g', 'h', 'i' and 'j' form a column. + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] + + # Nodes 1,2,3,4 form the clockwise corners of a large square. + # Nodes 5,6,7,8 form the clockwise corners of a small square + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] + + def test_graph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + gm = iso.GraphMatcher(g1, g2) + assert gm.is_isomorphic() + # Just testing some cases + assert gm.subgraph_is_monomorphic() + + mapping = sorted(gm.mapping.items()) + + # this mapping is only one of the possibilities + # so this test needs to be reconsidered + # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), + # ('g', 2), ('h', 5), ('i', 4), ('j', 7)] + # assert_equal(mapping, isomap) + + def test_subgraph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + g3 = g2.subgraph([1, 2, 3, 4]) + gm = iso.GraphMatcher(g1, g3) + assert gm.subgraph_is_isomorphic() + + def test_subgraph_mono(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from([[1, 2], [2, 3], [3, 4]]) + gm = iso.GraphMatcher(g1, g2) + assert gm.subgraph_is_monomorphic() + + +class TestVF2GraphDB: + # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/ + + @staticmethod + def create_graph(filename): + """Creates a Graph instance from the filename.""" + + # The file is assumed to be in the format from the VF2 graph database. + # Each file is composed of 16-bit numbers (unsigned short int). + # So we will want to read 2 bytes at a time. 
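+        # A minimal sketch of one such read, assuming the little-endian
+        # unsigned-short layout described above (the helper name is
+        # illustrative only):
+        #
+        #   import struct
+        #   def read_u16(fh):
+        #       return struct.unpack("<H", fh.read(2))[0]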
+ + # We can read the number as follows: + # number = struct.unpack(' 0 + assert check_isomorphism(t1, t2, isomorphism) + + +# run positive_single_tree over all the +# non-isomorphic trees for k from 4 to maxk +# k = 4 is the first level that has more than 1 non-isomorphic tree +# k = 13 takes about 2.86 seconds to run on my laptop +# larger values run slow down significantly +# as the number of trees grows rapidly +def test_positive(maxk=14): + print("positive test") + + for k in range(2, maxk + 1): + start_time = time.time() + trial = 0 + for t in nx.nonisomorphic_trees(k): + positive_single_tree(t) + trial += 1 + print(k, trial, time.time() - start_time) + + +# test the trivial case of a single node in each tree +# note that nonisomorphic_trees doesn't work for k = 1 +def test_trivial(): + print("trivial test") + + # back to an undirected graph + t1 = nx.Graph() + t1.add_node("a") + root1 = "a" + + t2 = nx.Graph() + t2.add_node("n") + root2 = "n" + + isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2) + + assert isomorphism == [("a", "n")] + + assert check_isomorphism(t1, t2, isomorphism) + + +# test another trivial case where the two graphs have +# different numbers of nodes +def test_trivial_2(): + print("trivial test 2") + + edges_1 = [("a", "b"), ("a", "c")] + + edges_2 = [("v", "y")] + + t1 = nx.Graph() + t1.add_edges_from(edges_1) + + t2 = nx.Graph() + t2.add_edges_from(edges_2) + + isomorphism = tree_isomorphism(t1, t2) + + # they cannot be isomorphic, + # since they have different numbers of nodes + assert isomorphism == [] + + +# the function nonisomorphic_trees generates all the non-isomorphic +# trees of a given size. Take each pair of these and verify that +# they are not isomorphic +# k = 4 is the first level that has more than 1 non-isomorphic tree +# k = 11 takes about 4.76 seconds to run on my laptop +# larger values run slow down significantly +# as the number of trees grows rapidly +def test_negative(maxk=11): + print("negative test") + + for k in range(4, maxk + 1): + test_trees = list(nx.nonisomorphic_trees(k)) + start_time = time.time() + trial = 0 + for i in range(len(test_trees) - 1): + for j in range(i + 1, len(test_trees)): + trial += 1 + assert tree_isomorphism(test_trees[i], test_trees[j]) == [] + print(k, trial, time.time() - start_time) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py new file mode 100644 index 0000000000000000000000000000000000000000..5f3fb901cc8afdc2a73cfb7001538db49371eaf4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py @@ -0,0 +1,1608 @@ +import itertools as it + +import pytest + +import networkx as nx +from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism + +labels_same = ["blue"] + +labels_many = [ + "white", + "red", + "blue", + "green", + "orange", + "black", + "purple", + "yellow", + "brown", + "cyan", + "solarized", + "pink", + "none", +] + + +class TestPreCheck: + def test_first_graph_empty(self): + G1 = nx.Graph() + G2 = nx.Graph([(0, 1), (1, 2)]) + assert not vf2pp_is_isomorphic(G1, G2) + + def test_second_graph_empty(self): + G1 = nx.Graph([(0, 1), (1, 2)]) + G2 = nx.Graph() + assert not vf2pp_is_isomorphic(G1, G2) + + def test_different_order1(self): + G1 = nx.path_graph(5) + G2 = nx.path_graph(6) + assert not vf2pp_is_isomorphic(G1, G2) + + def test_different_order2(self): + G1 = nx.barbell_graph(100, 20) + G2 = 
nx.barbell_graph(101, 20) + assert not vf2pp_is_isomorphic(G1, G2) + + def test_different_order3(self): + G1 = nx.complete_graph(7) + G2 = nx.complete_graph(8) + assert not vf2pp_is_isomorphic(G1, G2) + + def test_different_degree_sequences1(self): + G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)]) + G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)]) + assert not vf2pp_is_isomorphic(G1, G2) + + G2.remove_node(3) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label") + + assert vf2pp_is_isomorphic(G1, G2) + + def test_different_degree_sequences2(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (0, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 6), + (6, 3), + (4, 7), + (7, 8), + (8, 3), + ] + ) + G2 = G1.copy() + G2.add_edge(8, 0) + assert not vf2pp_is_isomorphic(G1, G2) + + G1.add_edge(6, 1) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label") + + assert vf2pp_is_isomorphic(G1, G2) + + def test_different_degree_sequences3(self): + G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]) + G2 = nx.Graph( + [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)] + ) + assert not vf2pp_is_isomorphic(G1, G2) + + G1.add_edge(3, 5) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label") + + assert vf2pp_is_isomorphic(G1, G2) + + def test_label_distribution(self): + G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]) + G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]) + + colors1 = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"] + colors2 = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"] + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors1[::-1]))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(colors2[::-1]))), "label") + + assert not vf2pp_is_isomorphic(G1, G2, node_label="label") + G2.nodes[3]["label"] = "blue" + assert vf2pp_is_isomorphic(G1, G2, node_label="label") + + +class TestAllGraphTypesEdgeCases: + @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph)) + def test_both_graphs_empty(self, graph_type): + G = graph_type() + H = graph_type() + assert vf2pp_isomorphism(G, H) is None + + G.add_node(0) + + assert vf2pp_isomorphism(G, H) is None + assert vf2pp_isomorphism(H, G) is None + + H.add_node(0) + assert vf2pp_isomorphism(G, H) == {0: 0} + + @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph)) + def test_first_graph_empty(self, graph_type): + G = graph_type() + H = graph_type([(0, 1)]) + assert vf2pp_isomorphism(G, H) is None + + @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph)) + def test_second_graph_empty(self, graph_type): + G = graph_type([(0, 1)]) + H = graph_type() + assert vf2pp_isomorphism(G, H) is None + + +class TestGraphISOVF2pp: + def test_custom_graph1_same_labels(self): + G1 = nx.Graph() + + mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"} + edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + assert vf2pp_isomorphism(G1, G2, 
node_label="label") + + # Add edge making G1 symmetrical + G1.add_edge(3, 7) + G1.nodes[7]["label"] = "blue" + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Make G2 isomorphic to G1 + G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])]) + G1.add_edge(4, 7) + G2.nodes["X"]["label"] = "blue" + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Re-structure maintaining isomorphism + G1.remove_edges_from([(1, 4), (1, 3)]) + G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])]) + assert vf2pp_isomorphism(G1, G2, node_label="label") + + def test_custom_graph1_different_labels(self): + G1 = nx.Graph() + + mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"} + edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + def test_custom_graph2_same_labels(self): + G1 = nx.Graph() + + mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"} + edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Obtain two isomorphic subgraphs from the graph + G2.remove_edge(mapped[1], mapped[2]) + G2.add_edge(mapped[1], mapped[4]) + H1 = nx.Graph(G1.subgraph([2, 3, 4, 7])) + H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]])) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + # Add edges maintaining isomorphism + H1.add_edges_from([(3, 7), (4, 7)]) + H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])]) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + def test_custom_graph2_different_labels(self): + G1 = nx.Graph() + + mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"} + edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + + # Adding new nodes + G1.add_node(0) + G2.add_node("Z") + G1.nodes[0]["label"] = G1.nodes[1]["label"] + G2.nodes["Z"]["label"] = G1.nodes[1]["label"] + mapped.update({0: "Z"}) + + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + # Change the color of one of the nodes + G2.nodes["Z"]["label"] = G1.nodes[2]["label"] + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Add an extra edge + G1.nodes[0]["label"] = "blue" + G2.nodes["Z"]["label"] = "blue" + G1.add_edge(0, 1) + + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Add extra edge to both + G2.add_edge("Z", "A") + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + def test_custom_graph3_same_labels(self): + G1 = nx.Graph() + + mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2} + edges1 = [ + (1, 2), + (1, 3), + (2, 3), + (3, 4), + (4, 5), + (4, 7), + (4, 9), + (5, 8), + (8, 9), + (5, 6), + (6, 7), + (5, 2), + ] + G1.add_edges_from(edges1) 
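+        # ``nx.relabel_nodes`` below returns an isomorphic copy of G1, so
+        # ``mapped`` is itself a witness isomorphism that vf2pp can find.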
+ G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Connect nodes maintaining symmetry + G1.add_edges_from([(6, 9), (7, 8)]) + G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])]) + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Make isomorphic + G1.add_edges_from([(6, 8), (7, 9)]) + G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])]) + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Connect more nodes + G1.add_edges_from([(2, 7), (3, 6)]) + G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])]) + G1.add_node(10) + G2.add_node("Z") + G1.nodes[10]["label"] = "blue" + G2.nodes["Z"]["label"] = "blue" + + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Connect the newly added node, to opposite sides of the graph + G1.add_edges_from([(10, 1), (10, 5), (10, 8)]) + G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])]) + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Get two subgraphs that are not isomorphic but are easy to make + H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10])) + H2 = nx.Graph( + G2.subgraph( + [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"] + ) + ) + assert vf2pp_isomorphism(H1, H2, node_label="label") is None + + # Restructure both to make them isomorphic + H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)]) + H2.add_edges_from( + [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])] + ) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + # Add edges with opposite direction in each Graph + H1.add_edge(3, 5) + H2.add_edge(mapped[5], mapped[7]) + assert vf2pp_isomorphism(H1, H2, node_label="label") is None + + def test_custom_graph3_different_labels(self): + G1 = nx.Graph() + + mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2} + edges1 = [ + (1, 2), + (1, 3), + (2, 3), + (3, 4), + (4, 5), + (4, 7), + (4, 9), + (5, 8), + (8, 9), + (5, 6), + (6, 7), + (5, 2), + ] + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + # Add extra edge to G1 + G1.add_edge(1, 7) + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Compensate in G2 + G2.add_edge(9, 1) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + # Add extra node + G1.add_node("A") + G2.add_node("K") + G1.nodes["A"]["label"] = "green" + G2.nodes["K"]["label"] = "green" + mapped.update({"A": "K"}) + + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + # Connect A to one side of G1 and K to the opposite + G1.add_edge("A", 6) + G2.add_edge("K", 5) + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Make the graphs symmetrical + G1.add_edge(1, 5) + G1.add_edge(2, 9) + G2.add_edge(9, 3) + G2.add_edge(8, 4) + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Assign same colors so the two opposite sides are identical + for node in G1.nodes(): + color = "red" + G1.nodes[node]["label"] = color + G2.nodes[mapped[node]]["label"] = color + + assert vf2pp_isomorphism(G1, G2, node_label="label") + + def 
test_custom_graph4_different_labels(self): + G1 = nx.Graph() + edges1 = [ + (1, 2), + (2, 3), + (3, 8), + (3, 4), + (4, 5), + (4, 6), + (3, 6), + (8, 7), + (8, 9), + (5, 9), + (10, 11), + (11, 12), + (12, 13), + (11, 13), + ] + + mapped = { + 1: "n", + 2: "m", + 3: "l", + 4: "j", + 5: "k", + 6: "i", + 7: "g", + 8: "h", + 9: "f", + 10: "b", + 11: "a", + 12: "d", + 13: "e", + } + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + def test_custom_graph4_same_labels(self): + G1 = nx.Graph() + edges1 = [ + (1, 2), + (2, 3), + (3, 8), + (3, 4), + (4, 5), + (4, 6), + (3, 6), + (8, 7), + (8, 9), + (5, 9), + (10, 11), + (11, 12), + (12, 13), + (11, 13), + ] + + mapped = { + 1: "n", + 2: "m", + 3: "l", + 4: "j", + 5: "k", + 6: "i", + 7: "g", + 8: "h", + 9: "f", + 10: "b", + 11: "a", + 12: "d", + 13: "e", + } + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Add nodes of different label + G1.add_node(0) + G2.add_node("z") + G1.nodes[0]["label"] = "green" + G2.nodes["z"]["label"] = "blue" + + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Make the labels identical + G2.nodes["z"]["label"] = "green" + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Change the structure of the graphs, keeping them isomorphic + G1.add_edge(2, 5) + G2.remove_edge("i", "l") + G2.add_edge("g", "l") + G2.add_edge("m", "f") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Change the structure of the disconnected sub-graph, keeping it isomorphic + G1.remove_node(13) + G2.remove_node("d") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Connect the newly added node to the disconnected graph, which now is just a path of size 3 + G1.add_edge(0, 10) + G2.add_edge("e", "z") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Connect the two disconnected sub-graphs, forming a single graph + G1.add_edge(11, 3) + G1.add_edge(0, 8) + G2.add_edge("a", "l") + G2.add_edge("z", "j") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + def test_custom_graph5_same_labels(self): + G1 = nx.Graph() + edges1 = [ + (1, 5), + (1, 2), + (1, 4), + (2, 3), + (2, 6), + (3, 4), + (3, 7), + (4, 8), + (5, 8), + (5, 6), + (6, 7), + (7, 8), + ] + mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"} + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Add different edges in each graph, maintaining symmetry + G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)]) + G2.add_edges_from( + [ + (mapped[6], mapped[3]), + (mapped[2], mapped[7]), + (mapped[1], mapped[6]), + (mapped[5], mapped[7]), + (mapped[3], mapped[8]), + (mapped[2], mapped[4]), + ] + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") + + # Obtain two different but isomorphic subgraphs from G1 and G2 + H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3])) + H2 = nx.Graph( + G2.subgraph( + 
[mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]] + ) + ) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + # Delete corresponding node from the two graphs + H1.remove_node(8) + H2.remove_node(mapped[7]) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + # Re-orient, maintaining isomorphism + H1.add_edge(1, 6) + H1.remove_edge(3, 6) + assert vf2pp_isomorphism(H1, H2, node_label="label") + + def test_custom_graph5_different_labels(self): + G1 = nx.Graph() + edges1 = [ + (1, 5), + (1, 2), + (1, 4), + (2, 3), + (2, 6), + (3, 4), + (3, 7), + (4, 8), + (5, 8), + (5, 6), + (6, 7), + (7, 8), + ] + mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"} + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"] + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + # Assign different colors to matching nodes + c = 0 + for node in G1.nodes(): + color1 = colors[c] + color2 = colors[(c + 3) % len(colors)] + G1.nodes[node]["label"] = color1 + G2.nodes[mapped[node]]["label"] = color2 + c += 1 + + assert vf2pp_isomorphism(G1, G2, node_label="label") is None + + # Get symmetrical sub-graphs of G1,G2 and compare them + H1 = G1.subgraph([1, 5]) + H2 = G2.subgraph(["i", "c"]) + c = 0 + for node1, node2 in zip(H1.nodes(), H2.nodes()): + H1.nodes[node1]["label"] = "red" + H2.nodes[node2]["label"] = "red" + c += 1 + + assert vf2pp_isomorphism(H1, H2, node_label="label") + + def test_disconnected_graph_all_same_labels(self): + G1 = nx.Graph() + G1.add_nodes_from(list(range(10))) + + mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0} + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + assert vf2pp_isomorphism(G1, G2, node_label="label") + + def test_disconnected_graph_all_different_labels(self): + G1 = nx.Graph() + G1.add_nodes_from(list(range(10))) + + mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0} + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped + + def test_disconnected_graph_some_same_labels(self): + G1 = nx.Graph() + G1.add_nodes_from(list(range(10))) + + mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0} + G2 = nx.relabel_nodes(G1, mapped) + + colors = [ + "white", + "white", + "white", + "purple", + "purple", + "red", + "red", + "pink", + "pink", + "pink", + ] + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label") + nx.set_node_attributes( + G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label" + ) + + assert vf2pp_isomorphism(G1, G2, node_label="label") + + +class TestMultiGraphISOVF2pp: + def test_custom_multigraph1_same_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"} + edges1 = [ + (1, 2), + (1, 3), + (1, 4), + (1, 4), + (1, 4), + (2, 3), + (2, 6), + (2, 6), + (3, 4), + (3, 4), + (5, 1), + (5, 1), + (5, 2), + (5, 2), + ] + + G1.add_edges_from(edges1) + G2 = 
nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Transfer the 2-clique to the right side of G1 + G1.remove_edges_from([(2, 6), (2, 6)]) + G1.add_edges_from([(3, 6), (3, 6)]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Delete an edges, making them symmetrical, so the position of the 2-clique doesn't matter + G2.remove_edge(mapped[1], mapped[4]) + G1.remove_edge(1, 4) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Add self-loops + G1.add_edges_from([(5, 5), (5, 5), (1, 1)]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Compensate in G2 + G2.add_edges_from( + [(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])] + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + def test_custom_multigraph1_different_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"} + edges1 = [ + (1, 2), + (1, 3), + (1, 4), + (1, 4), + (1, 4), + (2, 3), + (2, 6), + (2, 6), + (3, 4), + (3, 4), + (5, 1), + (5, 1), + (5, 2), + (5, 2), + ] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Re-structure G1, maintaining the degree sequence + G1.remove_edge(1, 4) + G1.add_edge(1, 5) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Restructure G2, making it isomorphic to G1 + G2.remove_edge("A", "D") + G2.add_edge("A", "Z") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Add edge from node to itself + G1.add_edges_from([(6, 6), (6, 6), (6, 6)]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Same for G2 + G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + def test_custom_multigraph2_same_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"} + edges1 = [ + (1, 2), + (1, 2), + (1, 5), + (1, 5), + (1, 5), + (5, 6), + (2, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 4), + (4, 5), + (4, 5), + (4, 5), + (2, 7), + (2, 7), + (2, 7), + ] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Obtain two non-isomorphic subgraphs from the graph + G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])]) + G2.add_edge(mapped[1], mapped[4]) + H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7])) + H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]])) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Make them isomorphic + H1.remove_edge(3, 4) + H1.add_edges_from([(2, 3), (2, 4), (2, 4)]) + H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Remove triangle edge + H1.remove_edges_from([(2, 3), (2, 3), (2, 
3)]) + H2.remove_edges_from([(mapped[5], mapped[4])] * 3) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Change the edge orientation such that H1 is rotated H2 + H1.remove_edges_from([(2, 7), (2, 7)]) + H1.add_edges_from([(3, 4), (3, 4)]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Add extra edges maintaining degree sequence, but in a non-symmetrical manner + H2.add_edge(mapped[5], mapped[1]) + H1.add_edge(3, 4) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + def test_custom_multigraph2_different_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"} + edges1 = [ + (1, 2), + (1, 2), + (1, 5), + (1, 5), + (1, 5), + (5, 6), + (2, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 4), + (4, 5), + (4, 5), + (4, 5), + (2, 7), + (2, 7), + (2, 7), + ] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Re-structure G1 + G1.remove_edge(2, 7) + G1.add_edge(5, 6) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Same for G2 + G2.remove_edge("B", "C") + G2.add_edge("G", "F") + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Delete node from G1 and G2, keeping them isomorphic + G1.remove_node(3) + G2.remove_node("D") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Change G1 edges + G1.remove_edge(1, 2) + G1.remove_edge(2, 7) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Make G2 identical to G1, but with different edge orientation and different labels + G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")]) + G2.remove_edges_from( + [("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")] + ) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Make all labels the same, so G1 and G2 are also isomorphic + for n1, n2 in zip(G1.nodes(), G2.nodes()): + G1.nodes[n1]["label"] = "blue" + G2.nodes[n2]["label"] = "blue" + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + def test_custom_multigraph3_same_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2} + edges1 = [ + (1, 2), + (1, 3), + (1, 3), + (2, 3), + (2, 3), + (3, 4), + (4, 5), + (4, 7), + (4, 9), + (4, 9), + (4, 9), + (5, 8), + (5, 8), + (8, 9), + (8, 9), + (5, 6), + (6, 7), + (6, 7), + (6, 7), + (5, 2), + ] + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Connect nodes maintaining symmetry + G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)]) + G2.add_edges_from( + [ + (mapped[6], mapped[8]), + (mapped[7], mapped[9]), + (mapped[5], mapped[8]), + (mapped[4], mapped[9]), + (mapped[4], mapped[9]), + ] + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Make isomorphic + G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)]) + G2.add_edges_from( + [ + (mapped[6], mapped[8]), + (mapped[6], mapped[9]), + (mapped[7], mapped[8]), + (mapped[7], mapped[9]), + (mapped[7], mapped[9]), + ] + 
) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Connect more nodes + G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)]) + G2.add_edges_from( + [ + (mapped[2], mapped[7]), + (mapped[2], mapped[7]), + (mapped[3], mapped[6]), + (mapped[3], mapped[6]), + ] + ) + G1.add_node(10) + G2.add_node("Z") + G1.nodes[10]["label"] = "blue" + G2.nodes["Z"]["label"] = "blue" + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Connect the newly added node, to opposite sides of the graph + G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)]) + G2.add_edges_from( + [ + ("Z", mapped[1]), + ("Z", mapped[4]), + ("Z", mapped[9]), + ("Z", "Z"), + ("Z", "Z"), + ] + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # We connected the new node to opposite sides, so G1 must be symmetrical to G2. Re-structure them to be so + G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)]) + G2.remove_edges_from( + [ + (mapped[1], mapped[3]), + (mapped[4], mapped[9]), + (mapped[4], mapped[9]), + (mapped[7], mapped[9]), + ] + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Get two subgraphs that are not isomorphic but are easy to make + H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10])) + H2 = nx.Graph( + G2.subgraph( + [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"] + ) + ) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Restructure both to make them isomorphic + H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)]) + H2.add_edges_from( + [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])] + ) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Remove one self-loop in H2 + H2.remove_edge("Z", "Z") + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Compensate in H1 + H1.remove_edge(10, 10) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + def test_custom_multigraph3_different_labels(self): + G1 = nx.MultiGraph() + + mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2} + edges1 = [ + (1, 2), + (1, 3), + (1, 3), + (2, 3), + (2, 3), + (3, 4), + (4, 5), + (4, 7), + (4, 9), + (4, 9), + (4, 9), + (5, 8), + (5, 8), + (8, 9), + (8, 9), + (5, 6), + (6, 7), + (6, 7), + (6, 7), + (5, 2), + ] + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Delete edge maintaining isomorphism + G1.remove_edge(4, 9) + G2.remove_edge(4, 6) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Change edge orientation such that G1 mirrors G2 + G1.add_edges_from([(4, 9), (1, 2), (1, 2)]) + G1.remove_edges_from([(1, 3), (1, 3)]) + G2.add_edges_from([(3, 5), (7, 9)]) + G2.remove_edge(8, 9) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Make all labels the same, so G1 and G2 are also isomorphic + for n1, n2 in zip(G1.nodes(), G2.nodes()): + G1.nodes[n1]["label"] = "blue" + G2.nodes[n2]["label"] = "blue" + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + G1.add_node(10) + G2.add_node("Z") + G1.nodes[10]["label"] = "green" + G2.nodes["Z"]["label"] = "green" + + # Add different number of edges between the new nodes and themselves + 
G1.add_edges_from([(10, 10), (10, 10)]) + G2.add_edges_from([("Z", "Z")]) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Make the number of self-edges equal + G1.remove_edge(10, 10) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Connect the new node to the graph + G1.add_edges_from([(10, 3), (10, 4)]) + G2.add_edges_from([("Z", 8), ("Z", 3)]) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Remove central node + G1.remove_node(4) + G2.remove_node(3) + G1.add_edges_from([(5, 6), (5, 6), (5, 7)]) + G2.add_edges_from([(1, 6), (1, 6), (6, 2)]) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + def test_custom_multigraph4_same_labels(self): + G1 = nx.MultiGraph() + edges1 = [ + (1, 2), + (1, 2), + (2, 2), + (2, 3), + (3, 8), + (3, 8), + (3, 4), + (4, 5), + (4, 5), + (4, 5), + (4, 6), + (3, 6), + (3, 6), + (6, 6), + (8, 7), + (7, 7), + (8, 9), + (9, 9), + (8, 9), + (8, 9), + (5, 9), + (10, 11), + (11, 12), + (12, 13), + (11, 13), + (10, 10), + (10, 11), + (11, 13), + ] + + mapped = { + 1: "n", + 2: "m", + 3: "l", + 4: "j", + 5: "k", + 6: "i", + 7: "g", + 8: "h", + 9: "f", + 10: "b", + 11: "a", + 12: "d", + 13: "e", + } + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Add extra but corresponding edges to both graphs + G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)]) + G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Obtain subgraphs + H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13])) + H2 = nx.MultiGraph( + G2.subgraph( + [ + mapped[2], + mapped[3], + mapped[8], + mapped[9], + mapped[10], + mapped[11], + mapped[12], + mapped[13], + ] + ) + ) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Make them isomorphic + H2.remove_edges_from( + [(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])] + ) + H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Re-structure the disconnected sub-graph + H1.remove_node(12) + H2.remove_node(mapped[12]) + H1.add_edge(13, 13) + H2.add_edge(mapped[13], mapped[13]) + + # Connect the two disconnected components, forming a single graph + H1.add_edges_from([(3, 13), (6, 11)]) + H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Change orientation of self-loops in one graph, maintaining the degree sequence + H1.remove_edges_from([(2, 2), (3, 6)]) + H1.add_edges_from([(6, 6), (2, 3)]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + def test_custom_multigraph4_different_labels(self): + G1 = nx.MultiGraph() + edges1 = [ + (1, 2), + (1, 2), + (2, 2), + (2, 3), + (3, 8), + (3, 8), + (3, 4), + (4, 5), + (4, 5), + (4, 5), + (4, 6), + (3, 6), + (3, 6), + (6, 6), + (8, 7), + (7, 7), + (8, 9), + (9, 9), + (8, 9), + (8, 9), + (5, 9), + (10, 11), + (11, 12), + (12, 13), + (11, 13), + ] + + mapped = { + 1: "n", + 2: "m", + 3: "l", + 4: "j", + 5: "k", + 6: "i", + 7: "g", + 8: "h", + 9: "f", + 10: "b", + 11: "a", + 12: "d", + 13: "e", + } + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + 
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m == mapped + + # Add extra but corresponding edges to both graphs + G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)]) + G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m == mapped + + # Obtain isomorphic subgraphs + H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6])) + H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"])) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2 + H1.remove_node(4) + H2.remove_node("j") + H1.remove_edges_from([(2, 2), (2, 3), (6, 6)]) + H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")]) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Assign the same labels so that mirroring means isomorphic + for n1, n2 in zip(H1.nodes(), H2.nodes()): + H1.nodes[n1]["label"] = "red" + H2.nodes[n2]["label"] = "red" + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Leave only one node with self-loop + H1.remove_nodes_from([3, 6]) + H2.remove_nodes_from(["m", "l"]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Remove one self-loop from H1 + H1.remove_edge(2, 2) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert not m + + # Same for H2 + H2.remove_edge("i", "i") + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Compose H1 with the disconnected sub-graph of G1. Same for H2 + S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13]))) + S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"]))) + + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + # Connect the two components + S1.add_edges_from([(13, 13), (13, 13), (2, 13)]) + S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")]) + m = vf2pp_isomorphism(H1, H2, node_label="label") + assert m + + def test_custom_multigraph5_same_labels(self): + G1 = nx.MultiGraph() + + edges1 = [ + (1, 5), + (1, 2), + (1, 4), + (2, 3), + (2, 6), + (3, 4), + (3, 7), + (4, 8), + (5, 8), + (5, 6), + (6, 7), + (7, 8), + ] + mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"} + + G1.add_edges_from(edges1) + G2 = nx.relabel_nodes(G1, mapped) + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Add multiple edges and self-loops, maintaining isomorphism + G1.add_edges_from( + [(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)] + ) + G2.add_edges_from( + [ + ("a", "h"), + ("a", "h"), + ("d", "j"), + ("c", "c"), + ("c", "c"), + ("j", "c"), + ("d", "h"), + ("g", "b"), + ] + ) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Make G2 to be the rotated G1 + G2.remove_edges_from( + [ + ("a", "h"), + ("a", "h"), + ("d", "j"), + ("c", "c"), + ("c", "c"), + ("j", "c"), + ("d", "h"), + ("g", "b"), + ] + ) + G2.add_edges_from( + [ + ("d", "i"), + ("a", "h"), + ("g", "b"), + ("g", "b"), + ("i", "i"), + ("i", "i"), + ("b", "j"), + ("d", "j"), + ] + ) + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + def test_disconnected_multigraph_all_same_labels(self): + G1 = nx.MultiGraph() + 
G1.add_nodes_from(list(range(10))) + G1.add_edges_from([(i, i) for i in range(10)]) + + mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0} + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label") + nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label") + + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Add self-loops to non-mapped nodes. Should be the same, as the graph is disconnected. + G1.add_edges_from([(i, i) for i in range(5, 8)] * 3) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Compensate in G2 + G2.add_edges_from([(i, i) for i in range(3)] * 3) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + # Add one more self-loop in G2 + G2.add_edges_from([(0, 0), (1, 1), (1, 1)]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Compensate in G1 + G1.add_edges_from([(5, 5), (7, 7), (7, 7)]) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + def test_disconnected_multigraph_all_different_labels(self): + G1 = nx.MultiGraph() + G1.add_nodes_from(list(range(10))) + G1.add_edges_from([(i, i) for i in range(10)]) + + mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0} + G2 = nx.relabel_nodes(G1, mapped) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip([mapped[n] for n in G1], it.cycle(labels_many))), + "label", + ) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + assert m == mapped + + # Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels + G1.add_edges_from([(i, i) for i in range(5, 8)] * 3) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Add self-loops to non mapped nodes in G2 as well + G2.add_edges_from([(mapped[i], mapped[i]) for i in range(3)] * 7) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Add self-loops to mapped nodes in G2 + G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert not m + + # Add self-loops to G1 so that they are even in both graphs + G1.add_edges_from([(i, i) for i in range(3)] * 7) + m = vf2pp_isomorphism(G1, G2, node_label="label") + assert m + + +class TestDiGraphISOVF2pp: + def test_wikipedia_graph(self): + edges1 = [ + (1, 5), + (1, 2), + (1, 4), + (3, 2), + (6, 2), + (3, 4), + (7, 3), + (4, 8), + (5, 8), + (6, 5), + (6, 7), + (7, 8), + ] + mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"} + + G1 = nx.DiGraph(edges1) + G2 = nx.relabel_nodes(G1, mapped) + + assert vf2pp_isomorphism(G1, G2) == mapped + + # Change the direction of an edge + G1.remove_edge(1, 5) + G1.add_edge(5, 1) + assert vf2pp_isomorphism(G1, G2) is None + + def test_non_isomorphic_same_degree_sequence(self): + r""" + G1 G2 + x--------------x x--------------x + | \ | | \ | + | x-------x | | x-------x | + | | | | | | | | + | x-------x | | x-------x | + | / | | \ | + x--------------x x--------------x + """ + edges1 = [ + (1, 5), + (1, 2), + (4, 1), + (3, 2), + (3, 4), + (4, 8), + (5, 8), + (6, 5), + (6, 7), + (7, 8), + ] + edges2 = [ + (1, 5), + (1, 2), + (4, 1), + (3, 2), + (4, 3), + (5, 8), + (6, 5), + (6, 7), + (3, 7), + (8, 7), + ] + + G1 = nx.DiGraph(edges1) + G2 = nx.DiGraph(edges2) + assert vf2pp_isomorphism(G1, G2) is None diff --git 
a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..0e29b1be61734603537f2431eb2de9f1ebfab459 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py @@ -0,0 +1,3106 @@ +import itertools as it + +import pytest + +import networkx as nx +from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism +from networkx.algorithms.isomorphism.vf2pp import ( + _consistent_PT, + _cut_PT, + _feasibility, + _find_candidates, + _find_candidates_Di, + _GraphParameters, + _initialize_parameters, + _matching_order, + _restore_Tinout, + _restore_Tinout_Di, + _StateParameters, + _update_Tinout, +) + +labels_same = ["blue"] + +labels_many = [ + "white", + "red", + "blue", + "green", + "orange", + "black", + "purple", + "yellow", + "brown", + "cyan", + "solarized", + "pink", + "none", +] + + +class TestNodeOrdering: + def test_empty_graph(self): + G1 = nx.Graph() + G2 = nx.Graph() + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + assert len(set(_matching_order(gparams))) == 0 + + def test_single_node(self): + G1 = nx.Graph() + G2 = nx.Graph() + G1.add_node(1) + G2.add_node(1) + + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label") + nx.set_node_attributes( + G2, + dict(zip(G2, it.cycle(labels_many))), + "label", + ) + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), + ) + + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + m = _matching_order(gparams) + assert m == [1] + + def test_matching_order(self): + labels = [ + "blue", + "blue", + "red", + "red", + "red", + "red", + "green", + "green", + "green", + "yellow", + "purple", + "purple", + "blue", + "blue", + ] + G1 = nx.Graph( + [ + (0, 1), + (0, 2), + (1, 2), + (2, 5), + (2, 4), + (1, 3), + (1, 4), + (3, 6), + (4, 6), + (6, 7), + (7, 8), + (9, 10), + (9, 11), + (11, 12), + (11, 13), + (12, 13), + (10, 13), + ] + ) + G2 = G1.copy() + nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels))), "label") + nx.set_node_attributes( + G2, + dict(zip(G2, it.cycle(labels))), + "label", + ) + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), + ) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + expected = [9, 11, 10, 13, 12, 1, 2, 4, 0, 3, 6, 5, 7, 8] + assert _matching_order(gparams) == expected + + def test_matching_order_all_branches(self): + G1 = nx.Graph( + [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 4), (3, 4)] + ) + G1.add_node(5) + G2 = G1.copy() + + G1.nodes[0]["label"] = "black" + G1.nodes[1]["label"] = "blue" + G1.nodes[2]["label"] = "blue" + G1.nodes[3]["label"] = "red" + G1.nodes[4]["label"] = "red" + G1.nodes[5]["label"] = "blue" + + G2.nodes[0]["label"] = "black" + G2.nodes[1]["label"] = "blue" + G2.nodes[2]["label"] = "blue" + G2.nodes[3]["label"] = "red" + G2.nodes[4]["label"] = "red" + G2.nodes[5]["label"] = "blue" + + l1, l2 = ( + nx.get_node_attributes(G1, "label"), + nx.get_node_attributes(G2, "label"), + ) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + expected = [0, 4, 
1, 3, 2, 5] + assert _matching_order(gparams) == expected + + +class TestGraphCandidateSelection: + G1_edges = [ + (1, 2), + (1, 4), + (1, 5), + (2, 3), + (2, 4), + (3, 4), + (4, 5), + (1, 6), + (6, 7), + (6, 8), + (8, 9), + (7, 9), + ] + mapped = { + 0: "x", + 1: "a", + 2: "b", + 3: "c", + 4: "d", + 5: "e", + 6: "f", + 7: "g", + 8: "h", + 9: "i", + } + + def test_no_covered_neighbors_no_labels(self): + G1 = nx.Graph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = dict(G1.degree) + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1 = {7, 8, 2, 4, 5} + T1_tilde = {0, 3, 6} + T2 = {"g", "h", "b", "d", "e"} + T2_tilde = {"x", "c", "f"} + + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 3 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 0 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + m.pop(9) + m_rev.pop(self.mapped[9]) + + T1 = {2, 4, 5, 6} + T1_tilde = {0, 3, 7, 8, 9} + T2 = {"g", "h", "b", "d", "e", "f"} + T2_tilde = {"x", "c", "g", "h", "i"} + + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 7 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == { + self.mapped[u], + self.mapped[8], + self.mapped[3], + self.mapped[9], + } + + def test_no_covered_neighbors_with_labels(self): + G1 = nx.Graph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = dict(G1.degree) + nx.set_node_attributes( + G1, + dict(zip(G1, it.cycle(labels_many))), + "label", + ) + nx.set_node_attributes( + G2, + dict( + zip( + [self.mapped[n] for n in G1], + it.cycle(labels_many), + ) + ), + "label", + ) + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1 = {7, 8, 2, 4, 5, 6} + T1_tilde = {0, 3} + T2 = {"g", "h", "b", "d", "e", "f"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 3 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 0 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + # Change label of disconnected node + G1.nodes[u]["label"] = "blue" + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + # No candidate + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == set() + + m.pop(9) + m_rev.pop(self.mapped[9]) + + T1 = {2, 4, 5, 6} + T1_tilde = {0, 3, 7, 8, 9} + T2 = {"b", "d", "e", "f"} + T2_tilde = {"x", "c", "g", "h", "i"} + + sparams = 
_StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 7 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + G1.nodes[8]["label"] = G1.nodes[7]["label"] + G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"] + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[8]} + + def test_covered_neighbors_no_labels(self): + G1 = nx.Graph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = dict(G1.degree) + l1 = dict(G1.nodes(data=None, default=-1)) + l2 = dict(G2.nodes(data=None, default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1 = {7, 8, 2, 4, 5, 6} + T1_tilde = {0, 3} + T2 = {"g", "h", "b", "d", "e", "f"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 5 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 6 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[2]} + + def test_covered_neighbors_with_labels(self): + G1 = nx.Graph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = dict(G1.degree) + nx.set_node_attributes( + G1, + dict(zip(G1, it.cycle(labels_many))), + "label", + ) + nx.set_node_attributes( + G2, + dict( + zip( + [self.mapped[n] for n in G1], + it.cycle(labels_many), + ) + ), + "label", + ) + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1 = {7, 8, 2, 4, 5, 6} + T1_tilde = {0, 3} + T2 = {"g", "h", "b", "d", "e", "f"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + u = 5 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 6 + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + # Assign to 2, the same label as 6 + G1.nodes[2]["label"] = G1.nodes[u]["label"] + G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"] + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups(dict(G2.degree())), + ) + + candidates = _find_candidates(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[2]} + + +class TestDiGraphCandidateSelection: + G1_edges = [ + (1, 2), + (1, 4), + (5, 1), + (2, 3), + (4, 2), + (3, 4), + (4, 5), + (1, 6), + (6, 7), + (6, 8), + (8, 9), + (7, 9), + ] + mapped = { + 0: "x", + 1: "a", + 2: "b", + 3: "c", 
+ 4: "d", + 5: "e", + 6: "f", + 7: "g", + 8: "h", + 9: "i", + } + + def test_no_covered_neighbors_no_labels(self): + G1 = nx.DiGraph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1_out = {2, 4, 6} + T1_in = {5, 7, 8} + T1_tilde = {0, 3} + T2_out = {"b", "d", "f"} + T2_in = {"e", "g", "h"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 3 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 0 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + m.pop(9) + m_rev.pop(self.mapped[9]) + + T1_out = {2, 4, 6} + T1_in = {5} + T1_tilde = {0, 3, 7, 8, 9} + T2_out = {"b", "d", "f"} + T2_in = {"e"} + T2_tilde = {"x", "c", "g", "h", "i"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 7 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[8], self.mapped[3]} + + def test_no_covered_neighbors_with_labels(self): + G1 = nx.DiGraph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + nx.set_node_attributes( + G1, + dict(zip(G1, it.cycle(labels_many))), + "label", + ) + nx.set_node_attributes( + G2, + dict( + zip( + [self.mapped[n] for n in G1], + it.cycle(labels_many), + ) + ), + "label", + ) + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1_out = {2, 4, 6} + T1_in = {5, 7, 8} + T1_tilde = {0, 3} + T2_out = {"b", "d", "f"} + T2_in = {"e", "g", "h"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 3 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 0 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + # Change label of disconnected node + G1.nodes[u]["label"] = "blue" + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + 
G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + # No candidate + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == set() + + m.pop(9) + m_rev.pop(self.mapped[9]) + + T1_out = {2, 4, 6} + T1_in = {5} + T1_tilde = {0, 3, 7, 8, 9} + T2_out = {"b", "d", "f"} + T2_in = {"e"} + T2_tilde = {"x", "c", "g", "h", "i"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 7 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + G1.nodes[8]["label"] = G1.nodes[7]["label"] + G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"] + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[8]} + + def test_covered_neighbors_no_labels(self): + G1 = nx.DiGraph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + + l1 = dict(G1.nodes(data=None, default=-1)) + l2 = dict(G2.nodes(data=None, default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1_out = {2, 4, 6} + T1_in = {5, 7, 8} + T1_tilde = {0, 3} + T2_out = {"b", "d", "f"} + T2_in = {"e", "g", "h"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 5 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 6 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + # Change the direction of an edge to make the degree orientation same as first candidate of u. 
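+        # Reversing 4->2 to 2->4 gives node 2 (in, out) = (1, 2), the same
+        # degree orientation as u = 6, so mapped[2] = "b" becomes a second
+        # valid candidate.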
+ G1.remove_edge(4, 2) + G1.add_edge(2, 4) + G2.remove_edge("d", "b") + G2.add_edge("b", "d") + + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[2]} + + def test_covered_neighbors_with_labels(self): + G1 = nx.DiGraph() + G1.add_edges_from(self.G1_edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, self.mapped) + + G1.remove_edge(4, 2) + G1.add_edge(2, 4) + G2.remove_edge("d", "b") + G2.add_edge("b", "d") + + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + + nx.set_node_attributes( + G1, + dict(zip(G1, it.cycle(labels_many))), + "label", + ) + nx.set_node_attributes( + G2, + dict( + zip( + [self.mapped[n] for n in G1], + it.cycle(labels_many), + ) + ), + "label", + ) + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + m = {9: self.mapped[9], 1: self.mapped[1]} + m_rev = {self.mapped[9]: 9, self.mapped[1]: 1} + + T1_out = {2, 4, 6} + T1_in = {5, 7, 8} + T1_tilde = {0, 3} + T2_out = {"b", "d", "f"} + T2_in = {"e", "g", "h"} + T2_tilde = {"x", "c"} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 5 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + u = 6 + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + # Assign to 2, the same label as 6 + G1.nodes[2]["label"] = G1.nodes[u]["label"] + G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"] + l1 = dict(G1.nodes(data="label", default=-1)) + l2 = dict(G2.nodes(data="label", default=-1)) + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u], self.mapped[2]} + + # Change the direction of an edge to make the degree orientation same as first candidate of u. 
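+        # Note: here the swap goes the other way. Node 2 returns to
+        # (in, out) = (2, 1), no longer matching u = 6, so it drops out of the
+        # candidate set despite sharing u's label.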
+ G1.remove_edge(2, 4) + G1.add_edge(4, 2) + G2.remove_edge("b", "d") + G2.add_edge("d", "b") + + gparams = _GraphParameters( + G1, + G2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + G2.in_degree(), G2.out_degree() + ) + } + ), + ) + + candidates = _find_candidates_Di(u, gparams, sparams, G1_degree) + assert candidates == {self.mapped[u]} + + def test_same_in_out_degrees_no_candidate(self): + g1 = nx.DiGraph([(4, 1), (4, 2), (3, 4), (5, 4), (6, 4)]) + g2 = nx.DiGraph([(1, 4), (2, 4), (3, 4), (4, 5), (4, 6)]) + + l1 = dict(g1.nodes(data=None, default=-1)) + l2 = dict(g2.nodes(data=None, default=-1)) + gparams = _GraphParameters( + g1, + g2, + l1, + l2, + nx.utils.groups(l1), + nx.utils.groups(l2), + nx.utils.groups( + { + node: (in_degree, out_degree) + for (node, in_degree), (_, out_degree) in zip( + g2.in_degree(), g2.out_degree() + ) + } + ), + ) + + g1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(g1.in_degree, g1.out_degree) + } + + m = {1: 1, 2: 2, 3: 3} + m_rev = m.copy() + + T1_out = {4} + T1_in = {4} + T1_tilde = {5, 6} + T2_out = {4} + T2_in = {4} + T2_tilde = {5, 6} + + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + u = 4 + # despite the same in and out degree, there's no candidate for u=4 + candidates = _find_candidates_Di(u, gparams, sparams, g1_degree) + assert candidates == set() + # Notice how the regular candidate selection method returns wrong result. + assert _find_candidates(u, gparams, sparams, g1_degree) == {4} + + +class TestGraphISOFeasibility: + def test_const_covered_neighbors(self): + G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2)]) + G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "a"), ("k", "c")]) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_no_covered_neighbors(self): + G1 = nx.Graph([(0, 1), (1, 2), (3, 4), (3, 5)]) + G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")]) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_mixed_covered_uncovered_neighbors(self): + G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)]) + G2 = nx.Graph( + [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_fail_cases(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (4, 1), + (5, 3), + ] + ) + G2 = nx.Graph( + [ + ("a", "b"), + ("b", "c"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("k", "f"), + ("k", "g"), + ("e", "b"), + ("f", "d"), + ] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: 
"d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 10, "k" + assert _consistent_PT(u, v, gparams, sparams) + + # Delete one uncovered neighbor of u. Notice how it still passes the test. + # Two reasons for this: + # 1. If u, v had different degrees from the beginning, they wouldn't + # be selected as candidates in the first place. + # 2. Even if they are selected, consistency is basically 1-look-ahead, + # meaning that we take into consideration the relation of the + # candidates with their mapped neighbors. The node we deleted is + # not a covered neighbor. + # Such nodes will be checked by the cut_PT function, which is + # basically the 2-look-ahead, checking the relation of the + # candidates with T1, T2 (in which belongs the node we just deleted). + G1.remove_node(6) + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of u in G1 + G1.add_edge(u, 2) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.add_edge(v, "c") + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of v in G2 + G2.add_edge(v, "x") + G1.add_node(7) + sparams.mapping.update({7: "x"}) + sparams.reverse_mapping.update({"x": 7}) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compendate in G1 + G1.add_edge(u, 7) + assert _consistent_PT(u, v, gparams, sparams) + + @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) + def test_cut_inconsistent_labels(self, graph_type): + G1 = graph_type( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (4, 1), + (5, 3), + ] + ) + G2 = graph_type( + [ + ("a", "b"), + ("b", "c"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("k", "f"), + ("k", "g"), + ("e", "b"), + ("f", "d"), + ] + ) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + l1.update({6: "green"}) # Change the label of one neighbor of u + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + + u, v = 10, "k" + assert _cut_PT(u, v, gparams, sparams) + + def test_cut_consistent_labels(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (4, 1), + (5, 3), + ] + ) + G2 = nx.Graph( + [ + ("a", "b"), + ("b", "c"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("k", "f"), + ("k", "g"), + ("e", "b"), + ("f", "d"), + ] + ) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5}, + None, + {6}, + None, + {"e", "f"}, + None, + {"g"}, + None, + ) + + u, v = 10, "k" + assert not _cut_PT(u, v, gparams, sparams) + + def test_cut_same_labels(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (4, 1), + (5, 3), + ] + ) + mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"} + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: 
"d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5}, + None, + {6}, + None, + {"e", "f"}, + None, + {"g"}, + None, + ) + + u, v = 10, "k" + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G1[u] and T1, so it's not the same as the one between G2[v] and T2 + G1.remove_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.remove_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde + G2.remove_edge(v, mapped[6]) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G1 + G1.remove_edge(u, 6) + assert not _cut_PT(u, v, gparams, sparams) + + # Add disconnected nodes, which will form the new Ti_out + G1.add_nodes_from([6, 7, 8]) + G2.add_nodes_from(["g", "y", "z"]) + sparams.T1_tilde.update({6, 7, 8}) + sparams.T2_tilde.update({"g", "y", "z"}) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + + assert not _cut_PT(u, v, gparams, sparams) + + # Add some new nodes to the mapping + sparams.mapping.update({6: "g", 7: "y"}) + sparams.reverse_mapping.update({"g": 6, "y": 7}) + + # Add more nodes to T1, T2. + G1.add_edges_from([(6, 20), (7, 20), (6, 21)]) + G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")]) + + sparams.mapping.update({20: "j", 21: "i"}) + sparams.reverse_mapping.update({"j": 20, "i": 21}) + sparams.T1.update({20, 21}) + sparams.T2.update({"i", "j"}) + sparams.T1_tilde.difference_update({6, 7}) + sparams.T2_tilde.difference_update({"g", "y"}) + + assert not _cut_PT(u, v, gparams, sparams) + + # Add nodes from the new T1 and T2, as neighbors of u and v respectively + G1.add_edges_from([(u, 20), (u, 21)]) + G2.add_edges_from([(v, "i"), (v, "j")]) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + + assert not _cut_PT(u, v, gparams, sparams) + + # Change the edges, maintaining the G1[u]-T1 intersection + G1.remove_edge(u, 20) + G1.add_edge(u, 4) + assert not _cut_PT(u, v, gparams, sparams) + + # Connect u to 8 which is still in T1_tilde + G1.add_edge(u, 8) + assert _cut_PT(u, v, gparams, sparams) + + # Same for v and z, so that inters(G1[u], T1out) == inters(G2[v], T2out) + G2.add_edge(v, "z") + assert not _cut_PT(u, v, gparams, sparams) + + def test_cut_different_labels(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 15), + (20, 12), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 3), + (20, 5), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + + l1 = {n: "none" for n in G1.nodes()} + l2 = {} + + l1.update( + { + 9: "blue", + 15: "blue", + 12: "blue", + 11: "green", + 3: "green", + 8: "red", + 0: "red", + 5: "yellow", + } + ) + l2.update({mapped[n]: l for n, l in l1.items()}) + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, 
+ {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact + l1.update({9: "red"}) + assert _cut_PT(u, v, gparams, sparams) + + # compensate in G2 + l2.update({mapped[9]: "red"}) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G1[u] and T1 + G1.add_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G2[v] and T2 + G2.add_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G2[v] and T2_tilde + G2.remove_edge(v, mapped[8]) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G1[u] and T1_tilde + G1.remove_edge(u, 8) + assert not _cut_PT(u, v, gparams, sparams) + + # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes + G1.add_edge(8, 3) + G2.add_edge(mapped[8], mapped[3]) + sparams.T1.add(8) + sparams.T2.add(mapped[8]) + sparams.T1_tilde.remove(8) + sparams.T2_tilde.remove(mapped[8]) + + assert not _cut_PT(u, v, gparams, sparams) + + # Remove neighbor of u from T1 + G1.remove_node(5) + l1.pop(5) + sparams.T1.remove(5) + assert _cut_PT(u, v, gparams, sparams) + + # Same in G2 + G2.remove_node(mapped[5]) + l2.pop(mapped[5]) + sparams.T2.remove(mapped[5]) + assert not _cut_PT(u, v, gparams, sparams) + + def test_feasibility_same_labels(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 15), + (20, 12), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 2), + (20, 5), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {mapped[n]: "blue" for n in G1.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change structure in G2 such that, ONLY consistency is harmed + G2.remove_edge(mapped[20], mapped[2]) + G2.add_edge(mapped[20], mapped[3]) + + # Consistency check fails, while the cutting rules are satisfied! 
+ assert not _cut_PT(u, v, gparams, sparams) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 and make it consistent + G1.remove_edge(20, 2) + G1.add_edge(20, 3) + assert not _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + # ONLY fail the cutting check + G2.add_edge(v, mapped[10]) + assert _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + def test_feasibility_different_labels(self): + G1 = nx.Graph( + [ + (0, 1), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 15), + (20, 12), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 2), + (20, 5), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + + l1 = {n: "none" for n in G1.nodes()} + l2 = {} + + l1.update( + { + 9: "blue", + 15: "blue", + 12: "blue", + 11: "green", + 2: "green", + 8: "red", + 0: "red", + 5: "yellow", + } + ) + l2.update({mapped[n]: l for n, l in l1.items()}) + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change structure in G2 such that, ONLY consistency is harmed + G2.remove_edge(mapped[20], mapped[2]) + G2.add_edge(mapped[20], mapped[3]) + l2.update({mapped[3]: "green"}) + + # Consistency check fails, while the cutting rules are satisfied! 
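+        # Same structural swap as in the same-labels test: v gained the
+        # covered neighbor "d" (relabeled "green") with no matching edge
+        # (20, 3) in G1 yet, which only the consistency check can detect.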
+ assert not _cut_PT(u, v, gparams, sparams) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 and make it consistent + G1.remove_edge(20, 2) + G1.add_edge(20, 3) + l1.update({3: "green"}) + assert not _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + # ONLY fail the cutting check + l1.update({5: "red"}) + assert _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + +class TestMultiGraphISOFeasibility: + def test_const_covered_neighbors(self): + G1 = nx.MultiGraph( + [(0, 1), (0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2)] + ) + G2 = nx.MultiGraph( + [ + ("a", "b"), + ("a", "b"), + ("b", "c"), + ("k", "a"), + ("k", "a"), + ("k", "a"), + ("k", "c"), + ("k", "c"), + ] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_no_covered_neighbors(self): + G1 = nx.MultiGraph([(0, 1), (0, 1), (1, 2), (3, 4), (3, 4), (3, 5)]) + G2 = nx.MultiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "w"), ("k", "z")]) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_mixed_covered_uncovered_neighbors(self): + G1 = nx.MultiGraph( + [(0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2), (3, 4), (3, 5)] + ) + G2 = nx.MultiGraph( + [ + ("a", "b"), + ("b", "c"), + ("k", "a"), + ("k", "a"), + ("k", "a"), + ("k", "c"), + ("k", "c"), + ("k", "w"), + ("k", "z"), + ] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_fail_cases(self): + G1 = nx.MultiGraph( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 0), + (10, 0), + (10, 3), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (10, 6), + (4, 1), + (5, 3), + ] + ) + mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"} + G2 = nx.relabel_nodes(G1, mapped) + + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 10, "k" + assert _consistent_PT(u, v, gparams, sparams) + + # Delete one uncovered neighbor of u. Notice how it still passes the test. Two reasons for this: + # 1. If u, v had different degrees from the beginning, they wouldn't be selected as candidates in the first + # place. + # 2. Even if they are selected, consistency is basically 1-look-ahead, meaning that we take into consideration + # the relation of the candidates with their mapped neighbors. The node we deleted is not a covered neighbor. + # Such nodes will be checked by the cut_PT function, which is basically the 2-look-ahead, checking the + # relation of the candidates with T1, T2 (in which belongs the node we just deleted). 
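+        # Node 6 below is uncovered (absent from the mapping), so deleting it
+        # is invisible to the 1-look-ahead consistency check.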
+ G1.remove_node(6) + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of u in G1 + G1.add_edge(u, 2) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.add_edge(v, "c") + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of v in G2 + G2.add_edge(v, "x") + G1.add_node(7) + sparams.mapping.update({7: "x"}) + sparams.reverse_mapping.update({"x": 7}) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compendate in G1 + G1.add_edge(u, 7) + assert _consistent_PT(u, v, gparams, sparams) + + # Delete an edge between u and a covered neighbor + G1.remove_edges_from([(u, 0), (u, 0)]) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.remove_edges_from([(v, mapped[0]), (v, mapped[0])]) + assert _consistent_PT(u, v, gparams, sparams) + + # Remove an edge between v and a covered neighbor + G2.remove_edge(v, mapped[3]) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 + G1.remove_edge(u, 3) + assert _consistent_PT(u, v, gparams, sparams) + + def test_cut_same_labels(self): + G1 = nx.MultiGraph( + [ + (0, 1), + (1, 2), + (10, 0), + (10, 0), + (10, 0), + (10, 3), + (10, 3), + (10, 4), + (10, 4), + (10, 5), + (10, 5), + (10, 5), + (10, 5), + (10, 6), + (10, 6), + (4, 1), + (5, 3), + ] + ) + mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"} + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5}, + None, + {6}, + None, + {"e", "f"}, + None, + {"g"}, + None, + ) + + u, v = 10, "k" + assert not _cut_PT(u, v, gparams, sparams) + + # Remove one of the multiple edges between u and a neighbor + G1.remove_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G2 + G1.remove_edge(u, 4) + G2.remove_edges_from([(v, mapped[4]), (v, mapped[4])]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde + G2.remove_edge(v, mapped[6]) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G1 + G1.remove_edge(u, 6) + assert not _cut_PT(u, v, gparams, sparams) + + # Add more edges between u and neighbor which belongs in T1_tilde + G1.add_edges_from([(u, 5), (u, 5), (u, 5)]) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.add_edges_from([(v, mapped[5]), (v, mapped[5]), (v, mapped[5])]) + assert not _cut_PT(u, v, gparams, sparams) + + # Add disconnected nodes, which will form the new Ti_out + G1.add_nodes_from([6, 7, 8]) + G2.add_nodes_from(["g", "y", "z"]) + G1.add_edges_from([(u, 6), (u, 6), (u, 6), (u, 8)]) + G2.add_edges_from([(v, "g"), (v, "g"), (v, "g"), (v, "z")]) + + sparams.T1_tilde.update({6, 7, 8}) + sparams.T2_tilde.update({"g", "y", "z"}) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + + assert not _cut_PT(u, v, gparams, sparams) + + # Add some new nodes to the mapping + sparams.mapping.update({6: "g", 7: "y"}) + sparams.reverse_mapping.update({"g": 6, "y": 7}) + + # Add more nodes to T1, T2. 
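+        # 20 and 21 (and "i", "j" in G2) are uncovered neighbors of the newly
+        # mapped nodes 6 and 7, which is exactly the membership condition for
+        # T1 and T2.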
+ G1.add_edges_from([(6, 20), (7, 20), (6, 21)]) + G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")]) + + sparams.T1.update({20, 21}) + sparams.T2.update({"i", "j"}) + sparams.T1_tilde.difference_update({6, 7}) + sparams.T2_tilde.difference_update({"g", "y"}) + + assert not _cut_PT(u, v, gparams, sparams) + + # Remove some edges + G2.remove_edge(v, "g") + assert _cut_PT(u, v, gparams, sparams) + + G1.remove_edge(u, 6) + G1.add_edge(u, 8) + G2.add_edge(v, "z") + assert not _cut_PT(u, v, gparams, sparams) + + # Add nodes from the new T1 and T2, as neighbors of u and v respectively + G1.add_edges_from([(u, 20), (u, 20), (u, 20), (u, 21)]) + G2.add_edges_from([(v, "i"), (v, "i"), (v, "i"), (v, "j")]) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + + assert not _cut_PT(u, v, gparams, sparams) + + # Change the edges + G1.remove_edge(u, 20) + G1.add_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + G2.remove_edge(v, "i") + G2.add_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + def test_cut_different_labels(self): + G1 = nx.MultiGraph( + [ + (0, 1), + (0, 1), + (1, 2), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 9), + (20, 9), + (20, 15), + (20, 15), + (20, 12), + (20, 11), + (20, 11), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 8), + (20, 3), + (20, 3), + (20, 5), + (20, 5), + (20, 5), + (20, 0), + (20, 0), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + + l1 = {n: "none" for n in G1.nodes()} + l2 = {} + + l1.update( + { + 9: "blue", + 15: "blue", + 12: "blue", + 11: "green", + 3: "green", + 8: "red", + 0: "red", + 5: "yellow", + } + ) + l2.update({mapped[n]: l for n, l in l1.items()}) + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change the orientation of the labels on neighbors of u compared to neighbors of v. 
Leave the structure intact + l1.update({9: "red"}) + assert _cut_PT(u, v, gparams, sparams) + + # compensate in G2 + l2.update({mapped[9]: "red"}) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G1[u] and T1 + G1.add_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G2[v] and T2 + G2.add_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + # Delete one from the multiple edges + G2.remove_edge(v, mapped[8]) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G1[u] and T1_tilde + G1.remove_edge(u, 8) + assert not _cut_PT(u, v, gparams, sparams) + + # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes + G1.add_edges_from([(8, 3), (8, 3), (8, u)]) + G2.add_edges_from([(mapped[8], mapped[3]), (mapped[8], mapped[3])]) + sparams.T1.add(8) + sparams.T2.add(mapped[8]) + sparams.T1_tilde.remove(8) + sparams.T2_tilde.remove(mapped[8]) + + assert _cut_PT(u, v, gparams, sparams) + + # Fix uneven edges + G1.remove_edge(8, u) + assert not _cut_PT(u, v, gparams, sparams) + + # Remove neighbor of u from T1 + G1.remove_node(5) + l1.pop(5) + sparams.T1.remove(5) + assert _cut_PT(u, v, gparams, sparams) + + # Same in G2 + G2.remove_node(mapped[5]) + l2.pop(mapped[5]) + sparams.T2.remove(mapped[5]) + assert not _cut_PT(u, v, gparams, sparams) + + def test_feasibility_same_labels(self): + G1 = nx.MultiGraph( + [ + (0, 1), + (0, 1), + (1, 2), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 9), + (20, 9), + (20, 15), + (20, 15), + (20, 12), + (20, 11), + (20, 11), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 8), + (20, 3), + (20, 3), + (20, 5), + (20, 5), + (20, 5), + (20, 0), + (20, 0), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {mapped[n]: "blue" for n in G1.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change structure in G2 such that, ONLY consistency is harmed + G2.remove_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])]) + G2.add_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])]) + + # Consistency check fails, while the cutting rules are satisfied! 
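+        # Multigraph consistency also compares edge multiplicities: u keeps
+        # its two (20, 3) edges while v's two ("x", "d") edges were swapped
+        # for ("x", "c"), so the covered-neighbor check fails.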
+ assert not _cut_PT(u, v, gparams, sparams) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 and make it consistent + G1.remove_edges_from([(20, 3), (20, 3)]) + G1.add_edges_from([(20, 2), (20, 2)]) + assert not _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + # ONLY fail the cutting check + G2.add_edges_from([(v, mapped[10])] * 5) + assert _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + # Pass all tests + G1.add_edges_from([(u, 10)] * 5) + assert not _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + def test_feasibility_different_labels(self): + G1 = nx.MultiGraph( + [ + (0, 1), + (0, 1), + (1, 2), + (1, 2), + (1, 14), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (4, 10), + (4, 9), + (6, 10), + (20, 9), + (20, 9), + (20, 9), + (20, 15), + (20, 15), + (20, 12), + (20, 11), + (20, 11), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 8), + (20, 2), + (20, 2), + (20, 5), + (20, 5), + (20, 5), + (20, 0), + (20, 0), + (20, 0), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "none" for n in G1.nodes()} + l2 = {} + + l1.update( + { + 9: "blue", + 15: "blue", + 12: "blue", + 11: "green", + 2: "green", + 8: "red", + 0: "red", + 5: "yellow", + } + ) + l2.update({mapped[n]: l for n, l in l1.items()}) + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 14}, + None, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "h", "o"}, + None, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change structure in G2 such that, ONLY consistency is harmed + G2.remove_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])]) + G2.add_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])]) + l2.update({mapped[3]: "green"}) + + # Consistency check fails, while the cutting rules are satisfied! 
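+        # Mirror of the same-labels case: v now has two edges to the covered
+        # neighbor "d" (relabeled "green") with no (20, 3) edges in G1 to
+        # match them.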
+ assert not _cut_PT(u, v, gparams, sparams) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 and make it consistent + G1.remove_edges_from([(20, 2), (20, 2)]) + G1.add_edges_from([(20, 3), (20, 3)]) + l1.update({3: "green"}) + assert not _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + # ONLY fail the cutting check + l1.update({5: "red"}) + assert _cut_PT(u, v, gparams, sparams) + assert _consistent_PT(u, v, gparams, sparams) + + +class TestDiGraphISOFeasibility: + def test_const_covered_neighbors(self): + G1 = nx.DiGraph([(0, 1), (1, 2), (0, 3), (2, 3)]) + G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "k"), ("c", "k")]) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_no_covered_neighbors(self): + G1 = nx.DiGraph([(0, 1), (1, 2), (3, 4), (3, 5)]) + G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")]) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_mixed_covered_uncovered_neighbors(self): + G1 = nx.DiGraph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)]) + G2 = nx.DiGraph( + [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 3, "k" + assert _consistent_PT(u, v, gparams, sparams) + + def test_const_fail_cases(self): + G1 = nx.DiGraph( + [ + (0, 1), + (2, 1), + (10, 0), + (10, 3), + (10, 4), + (5, 10), + (10, 6), + (1, 4), + (5, 3), + ] + ) + G2 = nx.DiGraph( + [ + ("a", "b"), + ("c", "b"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("f", "k"), + ("k", "g"), + ("b", "e"), + ("f", "d"), + ] + ) + gparams = _GraphParameters(G1, G2, None, None, None, None, None) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + u, v = 10, "k" + assert _consistent_PT(u, v, gparams, sparams) + + # Delete one uncovered neighbor of u. Notice how it still passes the + # test. Two reasons for this: + # 1. If u, v had different degrees from the beginning, they wouldn't + # be selected as candidates in the first place. + # 2. Even if they are selected, consistency is basically + # 1-look-ahead, meaning that we take into consideration the + # relation of the candidates with their mapped neighbors. + # The node we deleted is not a covered neighbor. + # Such nodes will be checked by the cut_PT function, which is + # basically the 2-look-ahead, checking the relation of the + # candidates with T1, T2 (in which belongs the node we just deleted). 
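+        # As in the undirected case, node 6 is uncovered, so removing it only
+        # affects the look-ahead sets that _cut_PT inspects, not consistency.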
+ G1.remove_node(6) + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of u in G1 + G1.add_edge(u, 2) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.add_edge(v, "c") + assert _consistent_PT(u, v, gparams, sparams) + + # Add one more covered neighbor of v in G2 + G2.add_edge(v, "x") + G1.add_node(7) + sparams.mapping.update({7: "x"}) + sparams.reverse_mapping.update({"x": 7}) + assert not _consistent_PT(u, v, gparams, sparams) + + # Compensate in G1 + G1.add_edge(u, 7) + assert _consistent_PT(u, v, gparams, sparams) + + def test_cut_inconsistent_labels(self): + G1 = nx.DiGraph( + [ + (0, 1), + (2, 1), + (10, 0), + (10, 3), + (10, 4), + (5, 10), + (10, 6), + (1, 4), + (5, 3), + ] + ) + G2 = nx.DiGraph( + [ + ("a", "b"), + ("c", "b"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("f", "k"), + ("k", "g"), + ("b", "e"), + ("f", "d"), + ] + ) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + l1.update({5: "green"}) # Change the label of one neighbor of u + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + None, + None, + None, + None, + None, + None, + None, + None, + ) + + u, v = 10, "k" + assert _cut_PT(u, v, gparams, sparams) + + def test_cut_consistent_labels(self): + G1 = nx.DiGraph( + [ + (0, 1), + (2, 1), + (10, 0), + (10, 3), + (10, 4), + (5, 10), + (10, 6), + (1, 4), + (5, 3), + ] + ) + G2 = nx.DiGraph( + [ + ("a", "b"), + ("c", "b"), + ("k", "a"), + ("k", "d"), + ("k", "e"), + ("f", "k"), + ("k", "g"), + ("b", "e"), + ("f", "d"), + ] + ) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4}, + {5, 10}, + {6}, + None, + {"e"}, + {"f", "k"}, + {"g"}, + None, + ) + + u, v = 10, "k" + assert not _cut_PT(u, v, gparams, sparams) + + def test_cut_same_labels(self): + G1 = nx.DiGraph( + [ + (0, 1), + (2, 1), + (10, 0), + (10, 3), + (10, 4), + (5, 10), + (10, 6), + (1, 4), + (5, 3), + ] + ) + mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"} + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4}, + {5, 10}, + {6}, + None, + {"e"}, + {"f", "k"}, + {"g"}, + None, + ) + + u, v = 10, "k" + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G1[u] and T1_out, so it's not the same as the one between G2[v] and T2_out + G1.remove_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.remove_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G1[u] and T1_in, so it's not the same as the one between G2[v] and T2_in + G1.remove_edge(5, u) + assert _cut_PT(u, v, gparams, sparams) + + # Compensate in G2 + G2.remove_edge(mapped[5], v) + assert not _cut_PT(u, v, gparams, sparams) + + # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde + G2.remove_edge(v, mapped[6]) + assert _cut_PT(u, v, gparams, 
sparams) + + # Compensate in G1 + G1.remove_edge(u, 6) + assert not _cut_PT(u, v, gparams, sparams) + + # Add disconnected nodes, which will form the new Ti_tilde + G1.add_nodes_from([6, 7, 8]) + G2.add_nodes_from(["g", "y", "z"]) + sparams.T1_tilde.update({6, 7, 8}) + sparams.T2_tilde.update({"g", "y", "z"}) + + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + + assert not _cut_PT(u, v, gparams, sparams) + + def test_cut_different_labels(self): + G1 = nx.DiGraph( + [ + (0, 1), + (1, 2), + (14, 1), + (0, 4), + (1, 5), + (2, 6), + (3, 7), + (3, 6), + (10, 4), + (4, 9), + (6, 10), + (20, 9), + (20, 15), + (20, 12), + (20, 11), + (12, 13), + (11, 13), + (20, 8), + (20, 3), + (20, 5), + (0, 20), + ] + ) + mapped = { + 0: "a", + 1: "b", + 2: "c", + 3: "d", + 4: "e", + 5: "f", + 6: "g", + 7: "h", + 8: "i", + 9: "j", + 10: "k", + 11: "l", + 12: "m", + 13: "n", + 14: "o", + 15: "p", + 20: "x", + } + G2 = nx.relabel_nodes(G1, mapped) + + l1 = {n: "none" for n in G1.nodes()} + l2 = {} + + l1.update( + { + 9: "blue", + 15: "blue", + 12: "blue", + 11: "green", + 3: "green", + 8: "red", + 0: "red", + 5: "yellow", + } + ) + l2.update({mapped[n]: l for n, l in l1.items()}) + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c", 3: "d"}, + {"a": 0, "b": 1, "c": 2, "d": 3}, + {4, 5, 6, 7, 20}, + {14, 20}, + {9, 10, 15, 12, 11, 13, 8}, + None, + {"e", "f", "g", "x"}, + {"o", "x"}, + {"j", "k", "l", "m", "n", "i", "p"}, + None, + ) + + u, v = 20, "x" + assert not _cut_PT(u, v, gparams, sparams) + + # Change the orientation of the labels on neighbors of u compared to neighbors of v. 
Leave the structure intact + l1.update({9: "red"}) + assert _cut_PT(u, v, gparams, sparams) + + # compensate in G2 + l2.update({mapped[9]: "red"}) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G1[u] and T1_out + G1.add_edge(u, 4) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G2[v] and T2_out + G2.add_edge(v, mapped[4]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G1[u] and T1_in + G1.add_edge(u, 14) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G2[v] and T2_in + G2.add_edge(v, mapped[14]) + assert not _cut_PT(u, v, gparams, sparams) + + # Change the intersection of G2[v] and T2_tilde + G2.remove_edge(v, mapped[8]) + assert _cut_PT(u, v, gparams, sparams) + + # Same for G1[u] and T1_tilde + G1.remove_edge(u, 8) + assert not _cut_PT(u, v, gparams, sparams) + + # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes + G1.add_edge(8, 3) + G2.add_edge(mapped[8], mapped[3]) + sparams.T1.add(8) + sparams.T2.add(mapped[8]) + sparams.T1_tilde.remove(8) + sparams.T2_tilde.remove(mapped[8]) + + assert not _cut_PT(u, v, gparams, sparams) + + # Remove neighbor of u from T1 + G1.remove_node(5) + l1.pop(5) + sparams.T1.remove(5) + assert _cut_PT(u, v, gparams, sparams) + + # Same in G2 + G2.remove_node(mapped[5]) + l2.pop(mapped[5]) + sparams.T2.remove(mapped[5]) + assert not _cut_PT(u, v, gparams, sparams) + + def test_predecessor_T1_in_fail(self): + G1 = nx.DiGraph( + [(0, 1), (0, 3), (4, 0), (1, 5), (5, 2), (3, 6), (4, 6), (6, 5)] + ) + mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g"} + G2 = nx.relabel_nodes(G1, mapped) + l1 = {n: "blue" for n in G1.nodes()} + l2 = {n: "blue" for n in G2.nodes()} + + gparams = _GraphParameters( + G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None + ) + sparams = _StateParameters( + {0: "a", 1: "b", 2: "c"}, + {"a": 0, "b": 1, "c": 2}, + {3, 5}, + {4, 5}, + {6}, + None, + {"d", "f"}, + {"f"}, # mapped[4] is missing from T2_in + {"g"}, + None, + ) + + u, v = 6, "g" + assert _cut_PT(u, v, gparams, sparams) + + sparams.T2_in.add("e") + assert not _cut_PT(u, v, gparams, sparams) + + +class TestGraphTinoutUpdating: + edges = [ + (1, 3), + (2, 3), + (3, 4), + (4, 9), + (4, 5), + (3, 9), + (5, 8), + (5, 7), + (8, 7), + (6, 7), + ] + mapped = { + 0: "x", + 1: "a", + 2: "b", + 3: "c", + 4: "d", + 5: "e", + 6: "f", + 7: "g", + 8: "h", + 9: "i", + } + G1 = nx.Graph() + G1.add_edges_from(edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, mapping=mapped) + + def test_updating(self): + G2_degree = dict(self.G2.degree) + gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree) + m, m_rev, T1, _, T1_tilde, _, T2, _, T2_tilde, _ = sparams + + # Add node to the mapping + m[4] = self.mapped[4] + m_rev[self.mapped[4]] = 4 + _update_Tinout(4, self.mapped[4], gparams, sparams) + + assert T1 == {3, 5, 9} + assert T2 == {"c", "i", "e"} + assert T1_tilde == {0, 1, 2, 6, 7, 8} + assert T2_tilde == {"x", "a", "b", "f", "g", "h"} + + # Add node to the mapping + m[5] = self.mapped[5] + m_rev.update({self.mapped[5]: 5}) + _update_Tinout(5, self.mapped[5], gparams, sparams) + + assert T1 == {3, 9, 8, 7} + assert T2 == {"c", "i", "h", "g"} + assert T1_tilde == {0, 1, 2, 6} + assert T2_tilde == {"x", "a", "b", "f"} + + # Add node to the mapping + m[6] = self.mapped[6] + m_rev.update({self.mapped[6]: 6}) + _update_Tinout(6, self.mapped[6], gparams, sparams) + + assert T1 == {3, 9, 8, 7} + assert T2 == {"c", "i", "h", "g"} + assert T1_tilde == {0, 
1, 2} + assert T2_tilde == {"x", "a", "b"} + + # Add node to the mapping + m[3] = self.mapped[3] + m_rev.update({self.mapped[3]: 3}) + _update_Tinout(3, self.mapped[3], gparams, sparams) + + assert T1 == {1, 2, 9, 8, 7} + assert T2 == {"a", "b", "i", "h", "g"} + assert T1_tilde == {0} + assert T2_tilde == {"x"} + + # Add node to the mapping + m[0] = self.mapped[0] + m_rev.update({self.mapped[0]: 0}) + _update_Tinout(0, self.mapped[0], gparams, sparams) + + assert T1 == {1, 2, 9, 8, 7} + assert T2 == {"a", "b", "i", "h", "g"} + assert T1_tilde == set() + assert T2_tilde == set() + + def test_restoring(self): + m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"} + m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6} + + T1 = {1, 2, 7, 9, 8} + T2 = {"a", "b", "g", "i", "h"} + T1_tilde = set() + T2_tilde = set() + + gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {}) + sparams = _StateParameters( + m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None + ) + + # Remove a node from the mapping + m.pop(0) + m_rev.pop("x") + _restore_Tinout(0, self.mapped[0], gparams, sparams) + + assert T1 == {1, 2, 7, 9, 8} + assert T2 == {"a", "b", "g", "i", "h"} + assert T1_tilde == {0} + assert T2_tilde == {"x"} + + # Remove a node from the mapping + m.pop(6) + m_rev.pop("f") + _restore_Tinout(6, self.mapped[6], gparams, sparams) + + assert T1 == {1, 2, 7, 9, 8} + assert T2 == {"a", "b", "g", "i", "h"} + assert T1_tilde == {0, 6} + assert T2_tilde == {"x", "f"} + + # Remove a node from the mapping + m.pop(3) + m_rev.pop("c") + _restore_Tinout(3, self.mapped[3], gparams, sparams) + + assert T1 == {7, 9, 8, 3} + assert T2 == {"g", "i", "h", "c"} + assert T1_tilde == {0, 6, 1, 2} + assert T2_tilde == {"x", "f", "a", "b"} + + # Remove a node from the mapping + m.pop(5) + m_rev.pop("e") + _restore_Tinout(5, self.mapped[5], gparams, sparams) + + assert T1 == {9, 3, 5} + assert T2 == {"i", "c", "e"} + assert T1_tilde == {0, 6, 1, 2, 7, 8} + assert T2_tilde == {"x", "f", "a", "b", "g", "h"} + + # Remove a node from the mapping + m.pop(4) + m_rev.pop("d") + _restore_Tinout(4, self.mapped[4], gparams, sparams) + + assert T1 == set() + assert T2 == set() + assert T1_tilde == set(self.G1.nodes()) + assert T2_tilde == set(self.G2.nodes()) + + +class TestDiGraphTinoutUpdating: + edges = [ + (1, 3), + (3, 2), + (3, 4), + (4, 9), + (4, 5), + (3, 9), + (5, 8), + (5, 7), + (8, 7), + (7, 6), + ] + mapped = { + 0: "x", + 1: "a", + 2: "b", + 3: "c", + 4: "d", + 5: "e", + 6: "f", + 7: "g", + 8: "h", + 9: "i", + } + G1 = nx.DiGraph(edges) + G1.add_node(0) + G2 = nx.relabel_nodes(G1, mapping=mapped) + + def test_updating(self): + G2_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip( + self.G2.in_degree, self.G2.out_degree + ) + } + gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree) + m, m_rev, T1_out, T1_in, T1_tilde, _, T2_out, T2_in, T2_tilde, _ = sparams + + # Add node to the mapping + m[4] = self.mapped[4] + m_rev[self.mapped[4]] = 4 + _update_Tinout(4, self.mapped[4], gparams, sparams) + + assert T1_out == {5, 9} + assert T1_in == {3} + assert T2_out == {"i", "e"} + assert T2_in == {"c"} + assert T1_tilde == {0, 1, 2, 6, 7, 8} + assert T2_tilde == {"x", "a", "b", "f", "g", "h"} + + # Add node to the mapping + m[5] = self.mapped[5] + m_rev[self.mapped[5]] = 5 + _update_Tinout(5, self.mapped[5], gparams, sparams) + + assert T1_out == {9, 8, 7} + assert T1_in == {3} + assert T2_out == {"i", "g", "h"} + assert T2_in == {"c"} + assert T1_tilde == {0, 1, 2, 6} + assert 
T2_tilde == {"x", "a", "b", "f"} + + # Add node to the mapping + m[6] = self.mapped[6] + m_rev[self.mapped[6]] = 6 + _update_Tinout(6, self.mapped[6], gparams, sparams) + + assert T1_out == {9, 8, 7} + assert T1_in == {3, 7} + assert T2_out == {"i", "g", "h"} + assert T2_in == {"c", "g"} + assert T1_tilde == {0, 1, 2} + assert T2_tilde == {"x", "a", "b"} + + # Add node to the mapping + m[3] = self.mapped[3] + m_rev[self.mapped[3]] = 3 + _update_Tinout(3, self.mapped[3], gparams, sparams) + + assert T1_out == {9, 8, 7, 2} + assert T1_in == {7, 1} + assert T2_out == {"i", "g", "h", "b"} + assert T2_in == {"g", "a"} + assert T1_tilde == {0} + assert T2_tilde == {"x"} + + # Add node to the mapping + m[0] = self.mapped[0] + m_rev[self.mapped[0]] = 0 + _update_Tinout(0, self.mapped[0], gparams, sparams) + + assert T1_out == {9, 8, 7, 2} + assert T1_in == {7, 1} + assert T2_out == {"i", "g", "h", "b"} + assert T2_in == {"g", "a"} + assert T1_tilde == set() + assert T2_tilde == set() + + def test_restoring(self): + m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"} + m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6} + + T1_out = {2, 7, 9, 8} + T1_in = {1, 7} + T2_out = {"b", "g", "i", "h"} + T2_in = {"a", "g"} + T1_tilde = set() + T2_tilde = set() + + gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {}) + sparams = _StateParameters( + m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None + ) + + # Remove a node from the mapping + m.pop(0) + m_rev.pop("x") + _restore_Tinout_Di(0, self.mapped[0], gparams, sparams) + + assert T1_out == {2, 7, 9, 8} + assert T1_in == {1, 7} + assert T2_out == {"b", "g", "i", "h"} + assert T2_in == {"a", "g"} + assert T1_tilde == {0} + assert T2_tilde == {"x"} + + # Remove a node from the mapping + m.pop(6) + m_rev.pop("f") + _restore_Tinout_Di(6, self.mapped[6], gparams, sparams) + + assert T1_out == {2, 9, 8, 7} + assert T1_in == {1} + assert T2_out == {"b", "i", "h", "g"} + assert T2_in == {"a"} + assert T1_tilde == {0, 6} + assert T2_tilde == {"x", "f"} + + # Remove a node from the mapping + m.pop(3) + m_rev.pop("c") + _restore_Tinout_Di(3, self.mapped[3], gparams, sparams) + + assert T1_out == {9, 8, 7} + assert T1_in == {3} + assert T2_out == {"i", "h", "g"} + assert T2_in == {"c"} + assert T1_tilde == {0, 6, 1, 2} + assert T2_tilde == {"x", "f", "a", "b"} + + # Remove a node from the mapping + m.pop(5) + m_rev.pop("e") + _restore_Tinout_Di(5, self.mapped[5], gparams, sparams) + + assert T1_out == {9, 5} + assert T1_in == {3} + assert T2_out == {"i", "e"} + assert T2_in == {"c"} + assert T1_tilde == {0, 6, 1, 2, 8, 7} + assert T2_tilde == {"x", "f", "a", "b", "h", "g"} + + # Remove a node from the mapping + m.pop(4) + m_rev.pop("d") + _restore_Tinout_Di(4, self.mapped[4], gparams, sparams) + + assert T1_out == set() + assert T1_in == set() + assert T2_out == set() + assert T2_in == set() + assert T1_tilde == set(self.G1.nodes()) + assert T2_tilde == set(self.G2.nodes()) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py new file mode 100644 index 0000000000000000000000000000000000000000..66a434e05ceb7b42bb4e4893d26298001046386f --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py @@ -0,0 +1,200 @@ +""" + Tests for VF2 isomorphism algorithm for weighted graphs. 
+""" + +import math +from operator import eq + +import networkx as nx +import networkx.algorithms.isomorphism as iso + + +def test_simple(): + # 16 simple tests + w = "weight" + edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)] + for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]: + g1.add_weighted_edges_from(edges) + g2 = g1.subgraph(g1.nodes()) + if g1.is_multigraph(): + em = iso.numerical_multiedge_match("weight", 1) + else: + em = iso.numerical_edge_match("weight", 1) + assert nx.is_isomorphic(g1, g2, edge_match=em) + + for mod1, mod2 in [(False, True), (True, False), (True, True)]: + # mod1 tests a regular edge + # mod2 tests a selfloop + if g2.is_multigraph(): + if mod1: + data1 = {0: {"weight": 10}} + if mod2: + data2 = {0: {"weight": 1}, 1: {"weight": 2.5}} + else: + if mod1: + data1 = {"weight": 10} + if mod2: + data2 = {"weight": 2.5} + + g2 = g1.subgraph(g1.nodes()).copy() + if mod1: + if not g1.is_directed(): + g2._adj[1][0] = data1 + g2._adj[0][1] = data1 + else: + g2._succ[1][0] = data1 + g2._pred[0][1] = data1 + if mod2: + if not g1.is_directed(): + g2._adj[0][0] = data2 + else: + g2._succ[0][0] = data2 + g2._pred[0][0] = data2 + + assert not nx.is_isomorphic(g1, g2, edge_match=em) + + +def test_weightkey(): + g1 = nx.DiGraph() + g2 = nx.DiGraph() + + g1.add_edge("A", "B", weight=1) + g2.add_edge("C", "D", weight=0) + + assert nx.is_isomorphic(g1, g2) + em = iso.numerical_edge_match("nonexistent attribute", 1) + assert nx.is_isomorphic(g1, g2, edge_match=em) + em = iso.numerical_edge_match("weight", 1) + assert not nx.is_isomorphic(g1, g2, edge_match=em) + + g2 = nx.DiGraph() + g2.add_edge("C", "D") + assert nx.is_isomorphic(g1, g2, edge_match=em) + + +class TestNodeMatch_Graph: + def setup_method(self): + self.g1 = nx.Graph() + self.g2 = nx.Graph() + self.build() + + def build(self): + self.nm = iso.categorical_node_match("color", "") + self.em = iso.numerical_edge_match("weight", 1) + + self.g1.add_node("A", color="red") + self.g2.add_node("C", color="blue") + + self.g1.add_edge("A", "B", weight=1) + self.g2.add_edge("C", "D", weight=1) + + def test_noweight_nocolor(self): + assert nx.is_isomorphic(self.g1, self.g2) + + def test_color1(self): + assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) + + def test_color2(self): + self.g1.nodes["A"]["color"] = "blue" + assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) + + def test_weight1(self): + assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) + + def test_weight2(self): + self.g1.add_edge("A", "B", weight=2) + assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) + + def test_colorsandweights1(self): + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) + assert not iso + + def test_colorsandweights2(self): + self.g1.nodes["A"]["color"] = "blue" + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) + assert iso + + def test_colorsandweights3(self): + # make the weights disagree + self.g1.add_edge("A", "B", weight=2) + assert not nx.is_isomorphic( + self.g1, self.g2, node_match=self.nm, edge_match=self.em + ) + + +class TestEdgeMatch_MultiGraph: + def setup_method(self): + self.g1 = nx.MultiGraph() + self.g2 = nx.MultiGraph() + self.GM = iso.MultiGraphMatcher + self.build() + + def build(self): + g1 = self.g1 + g2 = self.g2 + + # We will assume integer weights only. 
+ g1.add_edge("A", "B", color="green", weight=0, size=0.5) + g1.add_edge("A", "B", color="red", weight=1, size=0.35) + g1.add_edge("A", "B", color="red", weight=2, size=0.65) + + g2.add_edge("C", "D", color="green", weight=1, size=0.5) + g2.add_edge("C", "D", color="red", weight=0, size=0.45) + g2.add_edge("C", "D", color="red", weight=2, size=0.65) + + if g1.is_multigraph(): + self.em = iso.numerical_multiedge_match("weight", 1) + self.emc = iso.categorical_multiedge_match("color", "") + self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) + self.emg2 = iso.generic_multiedge_match( + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, math.isclose], + ) + else: + self.em = iso.numerical_edge_match("weight", 1) + self.emc = iso.categorical_edge_match("color", "") + self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) + self.emg2 = iso.generic_edge_match( + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, math.isclose], + ) + + def test_weights_only(self): + assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) + + def test_colors_only(self): + gm = self.GM(self.g1, self.g2, edge_match=self.emc) + assert gm.is_isomorphic() + + def test_colorsandweights(self): + gm = self.GM(self.g1, self.g2, edge_match=self.emcm) + assert not gm.is_isomorphic() + + def test_generic1(self): + gm = self.GM(self.g1, self.g2, edge_match=self.emg1) + assert gm.is_isomorphic() + + def test_generic2(self): + gm = self.GM(self.g1, self.g2, edge_match=self.emg2) + assert not gm.is_isomorphic() + + +class TestEdgeMatch_DiGraph(TestNodeMatch_Graph): + def setup_method(self): + TestNodeMatch_Graph.setup_method(self) + self.g1 = nx.DiGraph() + self.g2 = nx.DiGraph() + self.build() + + +class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph): + def setup_method(self): + TestEdgeMatch_MultiGraph.setup_method(self) + self.g1 = nx.MultiDiGraph() + self.g2 = nx.MultiDiGraph() + self.GM = iso.MultiDiGraphMatcher + self.build() diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py new file mode 100644 index 0000000000000000000000000000000000000000..e409d515f1ce3c3058efa4edb26ea0c02cc6ae5d --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py @@ -0,0 +1,284 @@ +""" +An algorithm for finding if two undirected trees are isomorphic, +and if so returns an isomorphism between the two sets of nodes. + +This algorithm uses a routine to tell if two rooted trees (trees with a +specified root node) are isomorphic, which may be independently useful. + +This implements an algorithm from: +The Design and Analysis of Computer Algorithms +by Aho, Hopcroft, and Ullman +Addison-Wesley Publishing 1974 +Example 3.2 pp. 84-86. 
+ +A more understandable version of this algorithm is described in: +Homework Assignment 5 +McGill University SOCS 308-250B, Winter 2002 +by Matthew Suderman +http://crypto.cs.mcgill.ca/~crepeau/CS250/2004/HW5+.pdf +""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = ["rooted_tree_isomorphism", "tree_isomorphism"] + + +@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True) +def root_trees(t1, root1, t2, root2): + """Create a single digraph dT of free trees t1 and t2 + # with roots root1 and root2 respectively + # rename the nodes with consecutive integers + # so that all nodes get a unique name between both trees + + # our new "fake" root node is 0 + # t1 is numbers from 1 ... n + # t2 is numbered from n+1 to 2n + """ + + dT = nx.DiGraph() + + newroot1 = 1 # left root will be 1 + newroot2 = nx.number_of_nodes(t1) + 1 # right will be n+1 + + # may be overlap in node names here so need separate maps + # given the old name, what is the new + namemap1 = {root1: newroot1} + namemap2 = {root2: newroot2} + + # add an edge from our new root to root1 and root2 + dT.add_edge(0, namemap1[root1]) + dT.add_edge(0, namemap2[root2]) + + for i, (v1, v2) in enumerate(nx.bfs_edges(t1, root1)): + namemap1[v2] = i + namemap1[root1] + 1 + dT.add_edge(namemap1[v1], namemap1[v2]) + + for i, (v1, v2) in enumerate(nx.bfs_edges(t2, root2)): + namemap2[v2] = i + namemap2[root2] + 1 + dT.add_edge(namemap2[v1], namemap2[v2]) + + # now we really want the inverse of namemap1 and namemap2 + # giving the old name given the new + # since the values of namemap1 and namemap2 are unique + # there won't be collisions + namemap = {} + for old, new in namemap1.items(): + namemap[new] = old + for old, new in namemap2.items(): + namemap[new] = old + + return (dT, namemap, newroot1, newroot2) + + +# figure out the level of each node, with 0 at root +@nx._dispatchable +def assign_levels(G, root): + level = {} + level[root] = 0 + for v1, v2 in nx.bfs_edges(G, root): + level[v2] = level[v1] + 1 + + return level + + +# now group the nodes at each level +def group_by_levels(levels): + L = {} + for n, lev in levels.items(): + if lev not in L: + L[lev] = [] + L[lev].append(n) + + return L + + +# now lets get the isomorphism by walking the ordered_children +def generate_isomorphism(v, w, M, ordered_children): + # make sure tree1 comes first + assert v < w + M.append((v, w)) + for i, (x, y) in enumerate(zip(ordered_children[v], ordered_children[w])): + generate_isomorphism(x, y, M, ordered_children) + + +@nx._dispatchable(graphs={"t1": 0, "t2": 2}) +def rooted_tree_isomorphism(t1, root1, t2, root2): + """ + Given two rooted trees `t1` and `t2`, + with roots `root1` and `root2` respectively + this routine will determine if they are isomorphic. + + These trees may be either directed or undirected, + but if they are directed, all edges should flow from the root. + + It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes + of `t2`, such that two trees are then identical. + + Note that two trees may have more than one isomorphism, and this + routine just returns one valid mapping. 
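+
+    As a small illustrative sketch: rooting the same 3-node path at
+    opposite ends forces the mapping, so the result is predictable:
+
+    >>> t1 = nx.path_graph(3)
+    >>> t2 = nx.path_graph(3)
+    >>> sorted(rooted_tree_isomorphism(t1, 0, t2, 2))
+    [(0, 2), (1, 1), (2, 0)]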
+ + Parameters + ---------- + `t1` : NetworkX graph + One of the trees being compared + + `root1` : a node of `t1` which is the root of the tree + + `t2` : undirected NetworkX graph + The other tree being compared + + `root2` : a node of `t2` which is the root of the tree + + This is a subroutine used to implement `tree_isomorphism`, but will + be somewhat faster if you already have rooted trees. + + Returns + ------- + isomorphism : list + A list of pairs in which the left element is a node in `t1` + and the right element is a node in `t2`. The pairs are in + arbitrary order. If the nodes in one tree is mapped to the names in + the other, then trees will be identical. Note that an isomorphism + will not necessarily be unique. + + If `t1` and `t2` are not isomorphic, then it returns the empty list. + """ + + assert nx.is_tree(t1) + assert nx.is_tree(t2) + + # get the rooted tree formed by combining them + # with unique names + (dT, namemap, newroot1, newroot2) = root_trees(t1, root1, t2, root2) + + # compute the distance from the root, with 0 for our + levels = assign_levels(dT, 0) + + # height + h = max(levels.values()) + + # collect nodes into a dict by level + L = group_by_levels(levels) + + # each node has a label, initially set to 0 + label = {v: 0 for v in dT} + # and also ordered_labels and ordered_children + # which will store ordered tuples + ordered_labels = {v: () for v in dT} + ordered_children = {v: () for v in dT} + + # nothing to do on last level so start on h-1 + # also nothing to do for our fake level 0, so skip that + for i in range(h - 1, 0, -1): + # update the ordered_labels and ordered_children + # for any children + for v in L[i]: + # nothing to do if no children + if dT.out_degree(v) > 0: + # get all the pairs of labels and nodes of children + # and sort by labels + s = sorted((label[u], u) for u in dT.successors(v)) + + # invert to give a list of two tuples + # the sorted labels, and the corresponding children + ordered_labels[v], ordered_children[v] = list(zip(*s)) + + # now collect and sort the sorted ordered_labels + # for all nodes in L[i], carrying along the node + forlabel = sorted((ordered_labels[v], v) for v in L[i]) + + # now assign labels to these nodes, according to the sorted order + # starting from 0, where identical ordered_labels get the same label + current = 0 + for i, (ol, v) in enumerate(forlabel): + # advance to next label if not 0, and different from previous + if (i != 0) and (ol != forlabel[i - 1][0]): + current += 1 + label[v] = current + + # they are isomorphic if the labels of newroot1 and newroot2 are 0 + isomorphism = [] + if label[newroot1] == 0 and label[newroot2] == 0: + generate_isomorphism(newroot1, newroot2, isomorphism, ordered_children) + + # get the mapping back in terms of the old names + # return in sorted order for neatness + isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism] + + return isomorphism + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(graphs={"t1": 0, "t2": 1}) +def tree_isomorphism(t1, t2): + """ + Given two undirected (or free) trees `t1` and `t2`, + this routine will determine if they are isomorphic. + It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes + of `t2`, such that two trees are then identical. + + Note that two trees may have more than one isomorphism, and this + routine just returns one valid mapping. 
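+
+    As a quick sketch (the exact pairs may differ between equally valid
+    mappings, so only the size of the mapping is checked here):
+
+    >>> t1 = nx.path_graph(4)
+    >>> t2 = nx.relabel_nodes(t1, {0: "a", 1: "b", 2: "c", 3: "d"})
+    >>> mapping = dict(tree_isomorphism(t1, t2))
+    >>> len(mapping)
+    4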
+ + Parameters + ---------- + t1 : undirected NetworkX graph + One of the trees being compared + + t2 : undirected NetworkX graph + The other tree being compared + + Returns + ------- + isomorphism : list + A list of pairs in which the left element is a node in `t1` + and the right element is a node in `t2`. The pairs are in + arbitrary order. If the nodes in one tree is mapped to the names in + the other, then trees will be identical. Note that an isomorphism + will not necessarily be unique. + + If `t1` and `t2` are not isomorphic, then it returns the empty list. + + Notes + ----- + This runs in O(n*log(n)) time for trees with n nodes. + """ + + assert nx.is_tree(t1) + assert nx.is_tree(t2) + + # To be isomorphic, t1 and t2 must have the same number of nodes. + if nx.number_of_nodes(t1) != nx.number_of_nodes(t2): + return [] + + # Another shortcut is that the sorted degree sequences need to be the same. + degree_sequence1 = sorted(d for (n, d) in t1.degree()) + degree_sequence2 = sorted(d for (n, d) in t2.degree()) + + if degree_sequence1 != degree_sequence2: + return [] + + # A tree can have either 1 or 2 centers. + # If the number doesn't match then t1 and t2 are not isomorphic. + center1 = nx.center(t1) + center2 = nx.center(t2) + + if len(center1) != len(center2): + return [] + + # If there is only 1 center in each, then use it. + if len(center1) == 1: + return rooted_tree_isomorphism(t1, center1[0], t2, center2[0]) + + # If there both have 2 centers, then try the first for t1 + # with the first for t2. + attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0]) + + # If that worked we're done. + if len(attempts) > 0: + return attempts + + # Otherwise, try center1[0] with the center2[1], and see if that works + return rooted_tree_isomorphism(t1, center1[0], t2, center2[1]) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py new file mode 100644 index 0000000000000000000000000000000000000000..589e06447cc12447f073eefe3025c909e248698c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py @@ -0,0 +1,1068 @@ +""" +*************** +VF2++ Algorithm +*************** + +An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing. + +The simplest interface to use this module is to call: + +`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic. +`vf2pp_isomorphism`: to obtain the node mapping between two graphs, +in case they are isomorphic. +`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs, +if isomorphic. + +Introduction +------------ +The VF2++ algorithm, follows a similar logic to that of VF2, while also +introducing new easy-to-check cutting rules and determining the optimal access +order of nodes. It is also implemented in a non-recursive manner, which saves +both time and space, when compared to its previous counterpart. + +The optimal node ordering is obtained after taking into consideration both the +degree but also the label rarity of each node. +This way we place the nodes that are more likely to match, first in the order, +thus examining the most promising branches in the beginning. +The rules also consider node labels, making it easier to prune unfruitful +branches early in the process. + +Examples +-------- + +Suppose G1 and G2 are Isomorphic Graphs. 
Verification is as follows: + +Without node labels: + +>>> import networkx as nx +>>> G1 = nx.path_graph(4) +>>> G2 = nx.path_graph(4) +>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None) +True +>>> nx.vf2pp_isomorphism(G1, G2, node_label=None) +{1: 1, 2: 2, 0: 0, 3: 3} + +With node labels: + +>>> G1 = nx.path_graph(4) +>>> G2 = nx.path_graph(4) +>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0} +>>> nx.set_node_attributes(G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label") +>>> nx.set_node_attributes(G2, dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])), "label") +>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label") +True +>>> nx.vf2pp_isomorphism(G1, G2, node_label="label") +{1: 1, 2: 2, 0: 0, 3: 3} + +References +---------- +.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph + isomorphism algorithm". Discrete Applied Mathematics. 242. + https://doi.org/10.1016/j.dam.2018.02.018 + +""" +import collections + +import networkx as nx + +__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"] + +_GraphParameters = collections.namedtuple( + "_GraphParameters", + [ + "G1", + "G2", + "G1_labels", + "G2_labels", + "nodes_of_G1Labels", + "nodes_of_G2Labels", + "G2_nodes_of_degree", + ], +) + +_StateParameters = collections.namedtuple( + "_StateParameters", + [ + "mapping", + "reverse_mapping", + "T1", + "T1_in", + "T1_tilde", + "T1_tilde_in", + "T2", + "T2_in", + "T2_tilde", + "T2_tilde_in", + ], +) + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None): + """Return an isomorphic mapping between `G1` and `G2` if it exists. + + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Returns + ------- + dict or None + Node mapping if the two graphs are isomorphic. None otherwise. + """ + try: + mapping = next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label)) + return mapping + except StopIteration: + return None + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): + """Examines whether G1 and G2 are isomorphic. + + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Returns + ------- + bool + True if the two graphs are isomorphic, False otherwise. 
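+
+    Examples
+    --------
+    >>> G1, G2 = nx.path_graph(4), nx.path_graph(4)
+    >>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
+    True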
+ """ + if vf2pp_isomorphism(G1, G2, node_label, default_label) is not None: + return True + return False + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None): + """Yields all the possible mappings between G1 and G2. + + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Yields + ------ + dict + Isomorphic mapping between the nodes in `G1` and `G2`. + """ + if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0: + return False + + # Create the degree dicts based on graph type + if G1.is_directed(): + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + G2_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree) + } + else: + G1_degree = dict(G1.degree) + G2_degree = dict(G2.degree) + + if not G1.is_directed(): + find_candidates = _find_candidates + restore_Tinout = _restore_Tinout + else: + find_candidates = _find_candidates_Di + restore_Tinout = _restore_Tinout_Di + + # Check that both graphs have the same number of nodes and degree sequence + if G1.order() != G2.order(): + return False + if sorted(G1_degree.values()) != sorted(G2_degree.values()): + return False + + # Initialize parameters and cache necessary information about degree and labels + graph_params, state_params = _initialize_parameters( + G1, G2, G2_degree, node_label, default_label + ) + + # Check if G1 and G2 have the same labels, and that number of nodes per label is equal between the two graphs + if not _precheck_label_properties(graph_params): + return False + + # Calculate the optimal node ordering + node_order = _matching_order(graph_params) + + # Initialize the stack + stack = [] + candidates = iter( + find_candidates(node_order[0], graph_params, state_params, G1_degree) + ) + stack.append((node_order[0], candidates)) + + mapping = state_params.mapping + reverse_mapping = state_params.reverse_mapping + + # Index of the node from the order, currently being examined + matching_node = 1 + + while stack: + current_node, candidate_nodes = stack[-1] + + try: + candidate = next(candidate_nodes) + except StopIteration: + # If no remaining candidates, return to a previous state, and follow another branch + stack.pop() + matching_node -= 1 + if stack: + # Pop the previously added u-v pair, and look for a different candidate _v for u + popped_node1, _ = stack[-1] + popped_node2 = mapping[popped_node1] + mapping.pop(popped_node1) + reverse_mapping.pop(popped_node2) + restore_Tinout(popped_node1, popped_node2, graph_params, state_params) + continue + + if _feasibility(current_node, candidate, graph_params, state_params): + # Terminate if mapping is extended to its full + if len(mapping) == G2.number_of_nodes() - 1: + cp_mapping = mapping.copy() + cp_mapping[current_node] = candidate + yield cp_mapping + continue + + # Feasibility rules pass, so extend the mapping and update the parameters + mapping[current_node] = candidate + 
reverse_mapping[candidate] = current_node + _update_Tinout(current_node, candidate, graph_params, state_params) + # Append the next node and its candidates to the stack + candidates = iter( + find_candidates( + node_order[matching_node], graph_params, state_params, G1_degree + ) + ) + stack.append((node_order[matching_node], candidates)) + matching_node += 1 + + +def _precheck_label_properties(graph_params): + G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params + if any( + label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes) + for label, nodes in nodes_of_G2Labels.items() + ): + return False + return True + + +def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1): + """Initializes all the necessary parameters for VF2++ + + Parameters + ---------- + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + Returns + ------- + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2 + G1_labels,G2_labels: dict + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_out, T2_out: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + """ + G1_labels = dict(G1.nodes(data=node_label, default=default_label)) + G2_labels = dict(G2.nodes(data=node_label, default=default_label)) + + graph_params = _GraphParameters( + G1, + G2, + G1_labels, + G2_labels, + nx.utils.groups(G1_labels), + nx.utils.groups(G2_labels), + nx.utils.groups(G2_degree), + ) + + T1, T1_in = set(), set() + T2, T2_in = set(), set() + if G1.is_directed(): + T1_tilde, T1_tilde_in = ( + set(G1.nodes()), + set(), + ) # todo: do we need Ti_tilde_in? What nodes does it have? + T2_tilde, T2_tilde_in = set(G2.nodes()), set() + else: + T1_tilde, T1_tilde_in = set(G1.nodes()), set() + T2_tilde, T2_tilde_in = set(G2.nodes()), set() + + state_params = _StateParameters( + {}, + {}, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) + + return graph_params, state_params + + +def _matching_order(graph_params): + """The node ordering as introduced in VF2++. + + Notes + ----- + Taking into account the structure of the Graph and the node labeling, the nodes are placed in an order such that, + most of the unfruitful/infeasible branches of the search space can be pruned on high levels, significantly + decreasing the number of visited states. The premise is that, the algorithm will be able to recognize + inconsistencies early, proceeding to go deep into the search tree only if it's needed. + + Parameters + ---------- + graph_params: namedtuple + Contains: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism. + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively. + + Returns + ------- + node_order: list + The ordering of the nodes. 
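+
+    As an illustrative sketch on an unlabeled path (all nodes share the
+    default label, so the order is driven purely by degree and BFS
+    layering; shown here with this module's private helpers):
+
+    >>> G = nx.path_graph(3)
+    >>> gparams, _ = _initialize_parameters(G, G, dict(G.degree))
+    >>> _matching_order(gparams)
+    [1, 0, 2]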
+ """ + G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params + if not G1 and not G2: + return {} + + if G1.is_directed(): + G1 = G1.to_undirected(as_view=True) + + V1_unordered = set(G1.nodes()) + label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()} + used_degrees = {node: 0 for node in G1} + node_order = [] + + while V1_unordered: + max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered) + rarest_nodes = [ + n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity + ] + max_node = max(rarest_nodes, key=G1.degree) + + for dlevel_nodes in nx.bfs_layers(G1, max_node): + nodes_to_add = dlevel_nodes.copy() + while nodes_to_add: + max_used_degree = max(used_degrees[n] for n in nodes_to_add) + max_used_degree_nodes = [ + n for n in nodes_to_add if used_degrees[n] == max_used_degree + ] + max_degree = max(G1.degree[n] for n in max_used_degree_nodes) + max_degree_nodes = [ + n for n in max_used_degree_nodes if G1.degree[n] == max_degree + ] + next_node = min( + max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]] + ) + + node_order.append(next_node) + for node in G1.neighbors(next_node): + used_degrees[node] += 1 + + nodes_to_add.remove(next_node) + label_rarity[G1_labels[next_node]] -= 1 + V1_unordered.discard(next_node) + + return node_order + + +def _find_candidates( + u, graph_params, state_params, G1_degree +): # todo: make the 4th argument the degree of u + """Given node u of G1, finds the candidates of u from G2. + + Parameters + ---------- + u: Graph node + The node from G1 for which to find the candidates from G2. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_tilde, T2_tilde: set + Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti + + Returns + ------- + candidates: set + The nodes from G2 which are candidates for u. 
+ """ + G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params + mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params + + covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping] + if not covered_nbrs: + candidates = set(nodes_of_G2Labels[G1_labels[u]]) + candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + candidates.intersection_update(T2_tilde) + candidates.difference_update(reverse_mapping) + if G1.is_multigraph(): + candidates.difference_update( + { + node + for node in candidates + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return candidates + + nbr1 = covered_nbrs[0] + common_nodes = set(G2[mapping[nbr1]]) + + for nbr1 in covered_nbrs[1:]: + common_nodes.intersection_update(G2[mapping[nbr1]]) + + common_nodes.difference_update(reverse_mapping) + common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]]) + if G1.is_multigraph(): + common_nodes.difference_update( + { + node + for node in common_nodes + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return common_nodes + + +def _find_candidates_Di(u, graph_params, state_params, G1_degree): + G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params + mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params + + covered_successors = [succ for succ in G1[u] if succ in mapping] + covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping] + + if not (covered_successors or covered_predecessors): + candidates = set(nodes_of_G2Labels[G1_labels[u]]) + candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + candidates.intersection_update(T2_tilde) + candidates.difference_update(reverse_mapping) + if G1.is_multigraph(): + candidates.difference_update( + { + node + for node in candidates + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return candidates + + if covered_successors: + succ1 = covered_successors[0] + common_nodes = set(G2.pred[mapping[succ1]]) + + for succ1 in covered_successors[1:]: + common_nodes.intersection_update(G2.pred[mapping[succ1]]) + else: + pred1 = covered_predecessors.pop() + common_nodes = set(G2[mapping[pred1]]) + + for pred1 in covered_predecessors: + common_nodes.intersection_update(G2[mapping[pred1]]) + + common_nodes.difference_update(reverse_mapping) + common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]]) + if G1.is_multigraph(): + common_nodes.difference_update( + { + node + for node in common_nodes + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return common_nodes + + +def _feasibility(node1, node2, graph_params, state_params): + """Given a candidate pair of nodes u and v from G1 and G2 respectively, checks if it's feasible to extend the + mapping, i.e. if u and v can be matched. + + Notes + ----- + This function performs all the necessary checking by applying both consistency and cutting rules. + + Parameters + ---------- + node1, node2: Graph node + The candidate pair of nodes being checked for matching + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. 
+ The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_out, T2_out: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + + Returns + ------- + True if all checks are successful, False otherwise. + """ + G1 = graph_params.G1 + + if _cut_PT(node1, node2, graph_params, state_params): + return False + + if G1.is_multigraph(): + if not _consistent_PT(node1, node2, graph_params, state_params): + return False + + return True + + +def _cut_PT(u, v, graph_params, state_params): + """Implements the cutting rules for the ISO problem. + + Parameters + ---------- + u, v: Graph node + The two candidate nodes being examined. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_tilde, T2_tilde: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + + Returns + ------- + True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise. + """ + G1, G2, G1_labels, G2_labels, _, _, _ = graph_params + ( + _, + _, + T1, + T1_in, + T1_tilde, + _, + T2, + T2_in, + T2_tilde, + _, + ) = state_params + + u_labels_predecessors, v_labels_predecessors = {}, {} + if G1.is_directed(): + u_labels_predecessors = nx.utils.groups( + {n1: G1_labels[n1] for n1 in G1.pred[u]} + ) + v_labels_predecessors = nx.utils.groups( + {n2: G2_labels[n2] for n2 in G2.pred[v]} + ) + + if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()): + return True + + u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]}) + v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]}) + + # if the neighbors of u, do not have the same labels as those of v, NOT feasible. 
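+    # In other words: if u and v see different sets of labels among their
+    # neighbors, no extension of the mapping can reconcile them, so prune.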
+ if set(u_labels_successors.keys()) != set(v_labels_successors.keys()): + return True + + for label, G1_nbh in u_labels_successors.items(): + G2_nbh = v_labels_successors[label] + + if G1.is_multigraph(): + # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2 + u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh) + v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh) + if any( + u_nbr_edges != v_nbr_edges + for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges) + ): + return True + + if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)): + return True + if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)): + return True + if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len( + T2_in.intersection(G2_nbh) + ): + return True + + if not G1.is_directed(): + return False + + for label, G1_pred in u_labels_predecessors.items(): + G2_pred = v_labels_predecessors[label] + + if G1.is_multigraph(): + # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2 + u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred) + v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred) + if any( + u_nbr_edges != v_nbr_edges + for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges) + ): + return True + + if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)): + return True + if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)): + return True + if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)): + return True + + return False + + +def _consistent_PT(u, v, graph_params, state_params): + """Checks the consistency of extending the mapping using the current node pair. + + Parameters + ---------- + u, v: Graph node + The two candidate nodes being examined. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_out, T2_out: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + + Returns + ------- + True if the pair passes all the consistency checks successfully. False otherwise. 
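+
+    Notes
+    -----
+    For multigraphs this also requires that every mapped neighbor pair is
+    joined by the same number of parallel edges in both graphs.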
+ """ + G1, G2 = graph_params.G1, graph_params.G2 + mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping + + for neighbor in G1[u]: + if neighbor in mapping: + if G1.number_of_edges(u, neighbor) != G2.number_of_edges( + v, mapping[neighbor] + ): + return False + + for neighbor in G2[v]: + if neighbor in reverse_mapping: + if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges( + v, neighbor + ): + return False + + if not G1.is_directed(): + return True + + for predecessor in G1.pred[u]: + if predecessor in mapping: + if G1.number_of_edges(predecessor, u) != G2.number_of_edges( + mapping[predecessor], v + ): + return False + + for predecessor in G2.pred[v]: + if predecessor in reverse_mapping: + if G1.number_of_edges( + reverse_mapping[predecessor], u + ) != G2.number_of_edges(predecessor, v): + return False + + return True + + +def _update_Tinout(new_node1, new_node2, graph_params, state_params): + """Updates the Ti/Ti_out (i=1,2) when a new node pair u-v is added to the mapping. + + Notes + ----- + This function should be called right after the feasibility checks are passed, and node1 is mapped to node2. The + purpose of this function is to avoid brute force computing of Ti/Ti_out by iterating over all nodes of the graph + and checking which nodes satisfy the necessary conditions. Instead, in every step of the algorithm we focus + exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_out. + + Parameters + ---------- + new_node1, new_node2: Graph node + The two new nodes, added to the mapping. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. 
+ + T1_tilde, T2_tilde: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + """ + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping} + uncovered_successors_G2 = { + succ for succ in G2[new_node2] if succ not in reverse_mapping + } + + # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively + T1.update(uncovered_successors_G1) + T2.update(uncovered_successors_G2) + T1.discard(new_node1) + T2.discard(new_node2) + + T1_tilde.difference_update(uncovered_successors_G1) + T2_tilde.difference_update(uncovered_successors_G2) + T1_tilde.discard(new_node1) + T2_tilde.discard(new_node2) + + if not G1.is_directed(): + return + + uncovered_predecessors_G1 = { + pred for pred in G1.pred[new_node1] if pred not in mapping + } + uncovered_predecessors_G2 = { + pred for pred in G2.pred[new_node2] if pred not in reverse_mapping + } + + T1_in.update(uncovered_predecessors_G1) + T2_in.update(uncovered_predecessors_G2) + T1_in.discard(new_node1) + T2_in.discard(new_node2) + + T1_tilde.difference_update(uncovered_predecessors_G1) + T2_tilde.difference_update(uncovered_predecessors_G2) + T1_tilde.discard(new_node1) + T2_tilde.discard(new_node2) + + +def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params): + """Restores the previous version of Ti/Ti_out when a node pair is deleted from the mapping. + + Parameters + ---------- + popped_node1, popped_node2: Graph node + The two nodes deleted from the mapping. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are + neighbors of nodes that are. + + T1_tilde, T2_tilde: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + """ + # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1. + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + is_added = False + for neighbor in G1[popped_node1]: + if neighbor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1 + if any(nbr in mapping for nbr in G1[neighbor]): + continue + T1.discard(neighbor) + T1_tilde.add(neighbor) + + # Case where the node is not present in neither the mapping nor T1. 
By definition, it should belong to T1_tilde + if not is_added: + T1_tilde.add(popped_node1) + + is_added = False + for neighbor in G2[popped_node2]: + if neighbor in reverse_mapping: + is_added = True + T2.add(popped_node2) + else: + if any(nbr in reverse_mapping for nbr in G2[neighbor]): + continue + T2.discard(neighbor) + T2_tilde.add(neighbor) + + if not is_added: + T2_tilde.add(popped_node2) + + +def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params): + # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1. + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + is_added = False + for successor in G1[popped_node1]: + if successor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1_in.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1 + if not any(pred in mapping for pred in G1.pred[successor]): + T1.discard(successor) + + if not any(succ in mapping for succ in G1[successor]): + T1_in.discard(successor) + + if successor not in T1: + if successor not in T1_in: + T1_tilde.add(successor) + + for predecessor in G1.pred[popped_node1]: + if predecessor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1 + if not any(pred in mapping for pred in G1.pred[predecessor]): + T1.discard(predecessor) + + if not any(succ in mapping for succ in G1[predecessor]): + T1_in.discard(predecessor) + + if not (predecessor in T1 or predecessor in T1_in): + T1_tilde.add(predecessor) + + # Case where the node is not present in neither the mapping nor T1. By definition it should belong to T1_tilde + if not is_added: + T1_tilde.add(popped_node1) + + is_added = False + for successor in G2[popped_node2]: + if successor in reverse_mapping: + is_added = True + T2_in.add(popped_node2) + else: + if not any(pred in reverse_mapping for pred in G2.pred[successor]): + T2.discard(successor) + + if not any(succ in reverse_mapping for succ in G2[successor]): + T2_in.discard(successor) + + if successor not in T2: + if successor not in T2_in: + T2_tilde.add(successor) + + for predecessor in G2.pred[popped_node2]: + if predecessor in reverse_mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T2.add(popped_node2) + else: + # check if its neighbor has another connection with a covered node. 
If not, only then exclude it from T1 + if not any(pred in reverse_mapping for pred in G2.pred[predecessor]): + T2.discard(predecessor) + + if not any(succ in reverse_mapping for succ in G2[predecessor]): + T2_in.discard(predecessor) + + if not (predecessor in T2 or predecessor in T2_in): + T2_tilde.add(predecessor) + + if not is_added: + T2_tilde.add(popped_node2) diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py new file mode 100644 index 0000000000000000000000000000000000000000..9484edc042ac127451b7141f9da659e3e429b679 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py @@ -0,0 +1,192 @@ +""" + Module to simplify the specification of user-defined equality functions for + node and edge attributes during isomorphism checks. + + During the construction of an isomorphism, the algorithm considers two + candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then + compared with respect to properties involving n1 and n2, and if the outcome + is good, then the candidate nodes are considered isomorphic. NetworkX + provides a simple mechanism for users to extend the comparisons to include + node and edge attributes. + + Node attributes are handled by the node_match keyword. When considering + n1 and n2, the algorithm passes their node attribute dictionaries to + node_match, and if it returns False, then n1 and n2 cannot be + considered to be isomorphic. + + Edge attributes are handled by the edge_match keyword. When considering + n1 and n2, the algorithm must verify that outgoing edges from n1 are + commensurate with the outgoing edges for n2. If the graph is directed, + then a similar check is also performed for incoming edges. + + Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from + G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge + between (n1, v1) and only one edge between (n2, v2). Those edge attribute + dictionaries are passed to edge_match, and if it returns False, then + n1 and n2 cannot be considered isomorphic. For multigraphs and + multidigraphs, there can be multiple edges between (n1, v1) and also + multiple edges between (n2, v2). Now, there must exist an isomorphism + from "all the edges between (n1, v1)" to "all the edges between (n2, v2)". + So, all of the edge attribute dictionaries are passed to edge_match, and + it must determine if there is an isomorphism between the two sets of edges. +""" + +from . 
import isomorphvf2 as vf2 + +__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"] + + +def _semantic_feasibility(self, G1_node, G2_node): + """Returns True if mapping G1_node to G2_node is semantically feasible.""" + # Make sure the nodes match + if self.node_match is not None: + nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node]) + if not nm: + return False + + # Make sure the edges match + if self.edge_match is not None: + # Cached lookups + G1nbrs = self.G1_adj[G1_node] + G2nbrs = self.G2_adj[G2_node] + core_1 = self.core_1 + edge_match = self.edge_match + + for neighbor in G1nbrs: + # G1_node is not in core_1, so we must handle R_self separately + if neighbor == G1_node: + if G2_node in G2nbrs and not edge_match( + G1nbrs[G1_node], G2nbrs[G2_node] + ): + return False + elif neighbor in core_1: + G2_nbr = core_1[neighbor] + if G2_nbr in G2nbrs and not edge_match( + G1nbrs[neighbor], G2nbrs[G2_nbr] + ): + return False + # syntactic check has already verified that neighbors are symmetric + + return True + + +class GraphMatcher(vf2.GraphMatcher): + """VF2 isomorphism checker for undirected graphs.""" + + def __init__(self, G1, G2, node_match=None, edge_match=None): + """Initialize graph matcher. + + Parameters + ---------- + G1, G2: graph + The graphs to be tested. + + node_match: callable + A function that returns True iff node n1 in G1 and n2 in G2 + should be considered equal during the isomorphism test. The + function will be called like:: + + node_match(G1.nodes[n1], G2.nodes[n2]) + + That is, the function will receive the node attribute dictionaries + of the nodes under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + edge_match: callable + A function that returns True iff the edge attribute dictionary for + the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be + considered equal during the isomorphism test. The function will be + called like:: + + edge_match(G1[u1][v1], G2[u2][v2]) + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + """ + vf2.GraphMatcher.__init__(self, G1, G2) + + self.node_match = node_match + self.edge_match = edge_match + + # These will be modified during checks to minimize code repeat. + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + semantic_feasibility = _semantic_feasibility + + +class DiGraphMatcher(vf2.DiGraphMatcher): + """VF2 isomorphism checker for directed graphs.""" + + def __init__(self, G1, G2, node_match=None, edge_match=None): + """Initialize graph matcher. + + Parameters + ---------- + G1, G2 : graph + The graphs to be tested. + + node_match : callable + A function that returns True iff node n1 in G1 and n2 in G2 + should be considered equal during the isomorphism test. The + function will be called like:: + + node_match(G1.nodes[n1], G2.nodes[n2]) + + That is, the function will receive the node attribute dictionaries + of the nodes under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + edge_match : callable + A function that returns True iff the edge attribute dictionary for + the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be + considered equal during the isomorphism test. 
The function will be + called like:: + + edge_match(G1[u1][v1], G2[u2][v2]) + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + """ + vf2.DiGraphMatcher.__init__(self, G1, G2) + + self.node_match = node_match + self.edge_match = edge_match + + # These will be modified during checks to minimize code repeat. + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if mapping G1_node to G2_node is semantically feasible.""" + + # Test node_match and also test edge_match on successors + feasible = _semantic_feasibility(self, G1_node, G2_node) + if not feasible: + return False + + # Test edge_match on predecessors + self.G1_adj = self.G1.pred + self.G2_adj = self.G2.pred + feasible = _semantic_feasibility(self, G1_node, G2_node) + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + return feasible + + +# The "semantics" of edge_match are different for multi(di)graphs, but +# the implementation is the same. So, technically we do not need to +# provide "multi" versions, but we do so to match NetworkX's base classes. + + +class MultiGraphMatcher(GraphMatcher): + """VF2 isomorphism checker for undirected multigraphs.""" + + +class MultiDiGraphMatcher(DiGraphMatcher): + """VF2 isomorphism checker for directed multigraphs.""" diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/link_prediction.py b/venv/lib/python3.10/site-packages/networkx/algorithms/link_prediction.py new file mode 100644 index 0000000000000000000000000000000000000000..1fb24243a1669ba5900c7decf04516130dcbafce --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/link_prediction.py @@ -0,0 +1,688 @@ +""" +Link prediction algorithms. +""" + + +from math import log + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "resource_allocation_index", + "jaccard_coefficient", + "adamic_adar_index", + "preferential_attachment", + "cn_soundarajan_hopcroft", + "ra_index_soundarajan_hopcroft", + "within_inter_cluster", + "common_neighbor_centrality", +] + + +def _apply_prediction(G, func, ebunch=None): + """Applies the given function to each edge in the specified iterable + of edges. + + `G` is an instance of :class:`networkx.Graph`. + + `func` is a function on two inputs, each of which is a node in the + graph. The function can return anything, but it should return a + value representing a prediction of the likelihood of a "link" + joining the two nodes. + + `ebunch` is an iterable of pairs of nodes. If not specified, all + non-edges in the graph `G` will be used. + + """ + if ebunch is None: + ebunch = nx.non_edges(G) + else: + for u, v in ebunch: + if u not in G: + raise nx.NodeNotFound(f"Node {u} not in G.") + if v not in G: + raise nx.NodeNotFound(f"Node {v} not in G.") + return ((u, v, func(u, v)) for u, v in ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def resource_allocation_index(G, ebunch=None): + r"""Compute the resource allocation index of all node pairs in ebunch. + + Resource allocation index of `u` and `v` is defined as + + .. math:: + + \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{|\Gamma(w)|} + + where $\Gamma(u)$ denotes the set of neighbors of $u$. + + Parameters + ---------- + G : graph + A NetworkX undirected graph. 
+ + ebunch : iterable of node pairs, optional (default = None) + Resource allocation index will be computed for each pair of + nodes given in the iterable. The pairs must be given as + 2-tuples (u, v) where u and v are nodes in the graph. If ebunch + is None then all nonexistent edges in the graph will be used. + Default value: None. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their resource allocation index. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.resource_allocation_index(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 0.75000000 + (2, 3) -> 0.75000000 + + References + ---------- + .. [1] T. Zhou, L. Lu, Y.-C. Zhang. + Predicting missing links via local information. + Eur. Phys. J. B 71 (2009) 623. + https://arxiv.org/pdf/0901.0553.pdf + """ + + def predict(u, v): + return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v)) + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def jaccard_coefficient(G, ebunch=None): + r"""Compute the Jaccard coefficient of all node pairs in ebunch. + + Jaccard coefficient of nodes `u` and `v` is defined as + + .. math:: + + \frac{|\Gamma(u) \cap \Gamma(v)|}{|\Gamma(u) \cup \Gamma(v)|} + + where $\Gamma(u)$ denotes the set of neighbors of $u$. + + Parameters + ---------- + G : graph + A NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + Jaccard coefficient will be computed for each pair of nodes + given in the iterable. The pairs must be given as 2-tuples + (u, v) where u and v are nodes in the graph. If ebunch is None + then all nonexistent edges in the graph will be used. + Default value: None. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their Jaccard coefficient. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.jaccard_coefficient(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 0.60000000 + (2, 3) -> 0.60000000 + + References + ---------- + .. [1] D. Liben-Nowell, J. Kleinberg. + The Link Prediction Problem for Social Networks (2004). + http://www.cs.cornell.edu/home/kleinber/link-pred.pdf + """ + + def predict(u, v): + union_size = len(set(G[u]) | set(G[v])) + if union_size == 0: + return 0 + return len(nx.common_neighbors(G, u, v)) / union_size + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def adamic_adar_index(G, ebunch=None): + r"""Compute the Adamic-Adar index of all node pairs in ebunch. + + Adamic-Adar index of `u` and `v` is defined as + + .. math:: + + \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{\log |\Gamma(w)|} + + where $\Gamma(u)$ denotes the set of neighbors of $u$. + This index leads to zero-division for nodes only connected via self-loops. + It is intended to be used when no self-loops are present. 
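+
+    For example, in a three node path the two endpoints have a single
+    common neighbor of degree two, so their index is ``1 / log(2)``:
+
+    >>> G = nx.path_graph(3)
+    >>> [round(p, 2) for u, v, p in nx.adamic_adar_index(G, [(0, 2)])]
+    [1.44]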
+ + Parameters + ---------- + G : graph + NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + Adamic-Adar index will be computed for each pair of nodes given + in the iterable. The pairs must be given as 2-tuples (u, v) + where u and v are nodes in the graph. If ebunch is None then all + nonexistent edges in the graph will be used. + Default value: None. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their Adamic-Adar index. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.adamic_adar_index(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 1) -> 2.16404256 + (2, 3) -> 2.16404256 + + References + ---------- + .. [1] D. Liben-Nowell, J. Kleinberg. + The Link Prediction Problem for Social Networks (2004). + http://www.cs.cornell.edu/home/kleinber/link-pred.pdf + """ + + def predict(u, v): + return sum(1 / log(G.degree(w)) for w in nx.common_neighbors(G, u, v)) + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def common_neighbor_centrality(G, ebunch=None, alpha=0.8): + r"""Return the CCPA score for each pair of nodes. + + Compute the Common Neighbor and Centrality based Parameterized Algorithm(CCPA) + score of all node pairs in ebunch. + + CCPA score of `u` and `v` is defined as + + .. math:: + + \alpha \cdot (|\Gamma (u){\cap }^{}\Gamma (v)|)+(1-\alpha )\cdot \frac{N}{{d}_{uv}} + + where $\Gamma(u)$ denotes the set of neighbors of $u$, $\Gamma(v)$ denotes the + set of neighbors of $v$, $\alpha$ is parameter varies between [0,1], $N$ denotes + total number of nodes in the Graph and ${d}_{uv}$ denotes shortest distance + between $u$ and $v$. + + This algorithm is based on two vital properties of nodes, namely the number + of common neighbors and their centrality. Common neighbor refers to the common + nodes between two nodes. Centrality refers to the prestige that a node enjoys + in a network. + + .. seealso:: + + :func:`common_neighbors` + + Parameters + ---------- + G : graph + NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + Preferential attachment score will be computed for each pair of + nodes given in the iterable. The pairs must be given as + 2-tuples (u, v) where u and v are nodes in the graph. If ebunch + is None then all nonexistent edges in the graph will be used. + Default value: None. + + alpha : Parameter defined for participation of Common Neighbor + and Centrality Algorithm share. Values for alpha should + normally be between 0 and 1. Default value set to 0.8 + because author found better performance at 0.8 for all the + dataset. + Default value: 0.8 + + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their Common Neighbor and Centrality based + Parameterized Algorithm(CCPA) score. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If self loops exsists in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. 
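+
+    Notes
+    -----
+    When ``alpha == 1`` the distance term vanishes and the score is just the
+    number of common neighbors, so the implementation skips the all-pairs
+    shortest path computation in that case. For example, on a complete graph
+    every other node is a common neighbor of a given pair:
+
+    >>> G = nx.complete_graph(5)
+    >>> for u, v, p in nx.common_neighbor_centrality(G, [(0, 1)], alpha=1):
+    ...     print(u, v, p)
+    0 1 3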
+ + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.common_neighbor_centrality(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p}") + (0, 1) -> 3.4000000000000004 + (2, 3) -> 3.4000000000000004 + + References + ---------- + .. [1] Ahmad, I., Akhtar, M.U., Noor, S. et al. + Missing Link Prediction using Common Neighbor and Centrality based Parameterized Algorithm. + Sci Rep 10, 364 (2020). + https://doi.org/10.1038/s41598-019-57304-y + """ + + # When alpha == 1, the CCPA score simplifies to the number of common neighbors. + if alpha == 1: + + def predict(u, v): + if u == v: + raise nx.NetworkXAlgorithmError("Self loops are not supported") + + return len(nx.common_neighbors(G, u, v)) + + else: + spl = dict(nx.shortest_path_length(G)) + inf = float("inf") + + def predict(u, v): + if u == v: + raise nx.NetworkXAlgorithmError("Self loops are not supported") + path_len = spl[u].get(v, inf) + + n_nbrs = len(nx.common_neighbors(G, u, v)) + return alpha * n_nbrs + (1 - alpha) * len(G) / path_len + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def preferential_attachment(G, ebunch=None): + r"""Compute the preferential attachment score of all node pairs in ebunch. + + Preferential attachment score of `u` and `v` is defined as + + .. math:: + + |\Gamma(u)| |\Gamma(v)| + + where $\Gamma(u)$ denotes the set of neighbors of $u$. + + Parameters + ---------- + G : graph + NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + Preferential attachment score will be computed for each pair of + nodes given in the iterable. The pairs must be given as + 2-tuples (u, v) where u and v are nodes in the graph. If ebunch + is None then all nonexistent edges in the graph will be used. + Default value: None. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their preferential attachment score. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> preds = nx.preferential_attachment(G, [(0, 1), (2, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p}") + (0, 1) -> 16 + (2, 3) -> 16 + + References + ---------- + .. [1] D. Liben-Nowell, J. Kleinberg. + The Link Prediction Problem for Social Networks (2004). + http://www.cs.cornell.edu/home/kleinber/link-pred.pdf + """ + + def predict(u, v): + return G.degree(u) * G.degree(v) + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(node_attrs="community") +def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): + r"""Count the number of common neighbors of all node pairs in ebunch + using community information. + + For two nodes $u$ and $v$, this function computes the number of + common neighbors and bonus one for each common neighbor belonging to + the same community as $u$ and $v$. Mathematically, + + .. math:: + + |\Gamma(u) \cap \Gamma(v)| + \sum_{w \in \Gamma(u) \cap \Gamma(v)} f(w) + + where $f(w)$ equals 1 if $w$ belongs to the same community as $u$ + and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of + neighbors of $u$. + + Parameters + ---------- + G : graph + A NetworkX undirected graph. 
+ + ebunch : iterable of node pairs, optional (default = None) + The score will be computed for each pair of nodes given in the + iterable. The pairs must be given as 2-tuples (u, v) where u + and v are nodes in the graph. If ebunch is None then all + nonexistent edges in the graph will be used. + Default value: None. + + community : string, optional (default = 'community') + Nodes attribute name containing the community information. + G[u][community] identifies which community u belongs to. Each + node belongs to at most one community. Default value: 'community'. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their score. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.path_graph(3) + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 0 + >>> G.nodes[2]["community"] = 0 + >>> preds = nx.cn_soundarajan_hopcroft(G, [(0, 2)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p}") + (0, 2) -> 2 + + References + ---------- + .. [1] Sucheta Soundarajan and John Hopcroft. + Using community information to improve the precision of link + prediction methods. + In Proceedings of the 21st international conference companion on + World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. + http://doi.acm.org/10.1145/2187980.2188150 + """ + + def predict(u, v): + Cu = _community(G, u, community) + Cv = _community(G, v, community) + cnbors = nx.common_neighbors(G, u, v) + neighbors = ( + sum(_community(G, w, community) == Cu for w in cnbors) if Cu == Cv else 0 + ) + return len(cnbors) + neighbors + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(node_attrs="community") +def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): + r"""Compute the resource allocation index of all node pairs in + ebunch using community information. + + For two nodes $u$ and $v$, this function computes the resource + allocation index considering only common neighbors belonging to the + same community as $u$ and $v$. Mathematically, + + .. math:: + + \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{f(w)}{|\Gamma(w)|} + + where $f(w)$ equals 1 if $w$ belongs to the same community as $u$ + and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of + neighbors of $u$. + + Parameters + ---------- + G : graph + A NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + The score will be computed for each pair of nodes given in the + iterable. The pairs must be given as 2-tuples (u, v) where u + and v are nodes in the graph. If ebunch is None then all + nonexistent edges in the graph will be used. + Default value: None. + + community : string, optional (default = 'community') + Nodes attribute name containing the community information. + G[u][community] identifies which community u belongs to. Each + node belongs to at most one community. Default value: 'community'. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their score. 
+ + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 0 + >>> G.nodes[2]["community"] = 1 + >>> G.nodes[3]["community"] = 0 + >>> preds = nx.ra_index_soundarajan_hopcroft(G, [(0, 3)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 3) -> 0.50000000 + + References + ---------- + .. [1] Sucheta Soundarajan and John Hopcroft. + Using community information to improve the precision of link + prediction methods. + In Proceedings of the 21st international conference companion on + World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. + http://doi.acm.org/10.1145/2187980.2188150 + """ + + def predict(u, v): + Cu = _community(G, u, community) + Cv = _community(G, v, community) + if Cu != Cv: + return 0 + cnbors = nx.common_neighbors(G, u, v) + return sum(1 / G.degree(w) for w in cnbors if _community(G, w, community) == Cu) + + return _apply_prediction(G, predict, ebunch) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(node_attrs="community") +def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): + """Compute the ratio of within- and inter-cluster common neighbors + of all node pairs in ebunch. + + For two nodes `u` and `v`, if a common neighbor `w` belongs to the + same community as them, `w` is considered as within-cluster common + neighbor of `u` and `v`. Otherwise, it is considered as + inter-cluster common neighbor of `u` and `v`. The ratio between the + size of the set of within- and inter-cluster common neighbors is + defined as the WIC measure. [1]_ + + Parameters + ---------- + G : graph + A NetworkX undirected graph. + + ebunch : iterable of node pairs, optional (default = None) + The WIC measure will be computed for each pair of nodes given in + the iterable. The pairs must be given as 2-tuples (u, v) where + u and v are nodes in the graph. If ebunch is None then all + nonexistent edges in the graph will be used. + Default value: None. + + delta : float, optional (default = 0.001) + Value to prevent division by zero in case there is no + inter-cluster common neighbor between two nodes. See [1]_ for + details. Default value: 0.001. + + community : string, optional (default = 'community') + Nodes attribute name containing the community information. + G[u][community] identifies which community u belongs to. Each + node belongs to at most one community. Default value: 'community'. + + Returns + ------- + piter : iterator + An iterator of 3-tuples in the form (u, v, p) where (u, v) is a + pair of nodes and p is their WIC measure. + + Raises + ------ + NetworkXNotImplemented + If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`. + + NetworkXAlgorithmError + - If `delta` is less than or equal to zero. + - If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`). + + NodeNotFound + If `ebunch` has a node that is not in `G`. 
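+
+    Notes
+    -----
+    If `u` and `v` belong to different communities, the implementation
+    returns 0 without inspecting their common neighbors.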
+ + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4)]) + >>> G.nodes[0]["community"] = 0 + >>> G.nodes[1]["community"] = 1 + >>> G.nodes[2]["community"] = 0 + >>> G.nodes[3]["community"] = 0 + >>> G.nodes[4]["community"] = 0 + >>> preds = nx.within_inter_cluster(G, [(0, 4)]) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 4) -> 1.99800200 + >>> preds = nx.within_inter_cluster(G, [(0, 4)], delta=0.5) + >>> for u, v, p in preds: + ... print(f"({u}, {v}) -> {p:.8f}") + (0, 4) -> 1.33333333 + + References + ---------- + .. [1] Jorge Carlos Valverde-Rebaza and Alneu de Andrade Lopes. + Link prediction in complex networks based on cluster information. + In Proceedings of the 21st Brazilian conference on Advances in + Artificial Intelligence (SBIA'12) + https://doi.org/10.1007/978-3-642-34459-6_10 + """ + if delta <= 0: + raise nx.NetworkXAlgorithmError("Delta must be greater than zero") + + def predict(u, v): + Cu = _community(G, u, community) + Cv = _community(G, v, community) + if Cu != Cv: + return 0 + cnbors = nx.common_neighbors(G, u, v) + within = {w for w in cnbors if _community(G, w, community) == Cu} + inter = cnbors - within + return len(within) / (len(inter) + delta) + + return _apply_prediction(G, predict, ebunch) + + +def _community(G, u, community): + """Get the community of the given node.""" + node_u = G.nodes[u] + try: + return node_u[community] + except KeyError as err: + raise nx.NetworkXAlgorithmError( + f"No community information available for Node {u}" + ) from err diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py b/venv/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py new file mode 100644 index 0000000000000000000000000000000000000000..f695ec208d3dc1c8be7c52c64daea27332d892a0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py @@ -0,0 +1,268 @@ +"""Algorithms for finding the lowest common ancestor of trees and DAGs.""" +from collections import defaultdict +from collections.abc import Mapping, Set +from itertools import combinations_with_replacement + +import networkx as nx +from networkx.utils import UnionFind, arbitrary_element, not_implemented_for + +__all__ = [ + "all_pairs_lowest_common_ancestor", + "tree_all_pairs_lowest_common_ancestor", + "lowest_common_ancestor", +] + + +@not_implemented_for("undirected") +@nx._dispatchable +def all_pairs_lowest_common_ancestor(G, pairs=None): + """Return the lowest common ancestor of all pairs or the provided pairs + + Parameters + ---------- + G : NetworkX directed graph + + pairs : iterable of pairs of nodes, optional (default: all pairs) + The pairs of nodes of interest. + If None, will find the LCA of all pairs of nodes. + + Yields + ------ + ((node1, node2), lca) : 2-tuple + Where lca is least common ancestor of node1 and node2. + Note that for the default case, the order of the node pair is not considered, + e.g. you will not get both ``(a, b)`` and ``(b, a)`` + + Raises + ------ + NetworkXPointlessConcept + If `G` is null. + NetworkXError + If `G` is not a DAG. 
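+    NodeNotFound
+        If `pairs` contains a node that is not in `G`.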
+ + Examples + -------- + The default behavior is to yield the lowest common ancestor for all + possible combinations of nodes in `G`, including self-pairings: + + >>> G = nx.DiGraph([(0, 1), (0, 3), (1, 2)]) + >>> dict(nx.all_pairs_lowest_common_ancestor(G)) + {(0, 0): 0, (0, 1): 0, (0, 3): 0, (0, 2): 0, (1, 1): 1, (1, 3): 0, (1, 2): 1, (3, 3): 3, (3, 2): 0, (2, 2): 2} + + The pairs argument can be used to limit the output to only the + specified node pairings: + + >>> dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (2, 3)])) + {(1, 2): 1, (2, 3): 0} + + Notes + ----- + Only defined on non-null directed acyclic graphs. + + See Also + -------- + lowest_common_ancestor + """ + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("LCA only defined on directed acyclic graphs.") + if len(G) == 0: + raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.") + + if pairs is None: + pairs = combinations_with_replacement(G, 2) + else: + # Convert iterator to iterable, if necessary. Trim duplicates. + pairs = dict.fromkeys(pairs) + # Verify that each of the nodes in the provided pairs is in G + nodeset = set(G) + for pair in pairs: + if set(pair) - nodeset: + raise nx.NodeNotFound( + f"Node(s) {set(pair) - nodeset} from pair {pair} not in G." + ) + + # Once input validation is done, construct the generator + def generate_lca_from_pairs(G, pairs): + ancestor_cache = {} + + for v, w in pairs: + if v not in ancestor_cache: + ancestor_cache[v] = nx.ancestors(G, v) + ancestor_cache[v].add(v) + if w not in ancestor_cache: + ancestor_cache[w] = nx.ancestors(G, w) + ancestor_cache[w].add(w) + + common_ancestors = ancestor_cache[v] & ancestor_cache[w] + + if common_ancestors: + common_ancestor = next(iter(common_ancestors)) + while True: + successor = None + for lower_ancestor in G.successors(common_ancestor): + if lower_ancestor in common_ancestors: + successor = lower_ancestor + break + if successor is None: + break + common_ancestor = successor + yield ((v, w), common_ancestor) + + return generate_lca_from_pairs(G, pairs) + + +@not_implemented_for("undirected") +@nx._dispatchable +def lowest_common_ancestor(G, node1, node2, default=None): + """Compute the lowest common ancestor of the given pair of nodes. + + Parameters + ---------- + G : NetworkX directed graph + + node1, node2 : nodes in the graph. + + default : object + Returned if no common ancestor between `node1` and `node2` + + Returns + ------- + The lowest common ancestor of node1 and node2, + or default if they have no common ancestors. + + Examples + -------- + >>> G = nx.DiGraph() + >>> nx.add_path(G, (0, 1, 2, 3)) + >>> nx.add_path(G, (0, 4, 3)) + >>> nx.lowest_common_ancestor(G, 2, 4) + 0 + + See Also + -------- + all_pairs_lowest_common_ancestor""" + + ans = list(all_pairs_lowest_common_ancestor(G, pairs=[(node1, node2)])) + if ans: + assert len(ans) == 1 + return ans[0][1] + return default + + +@not_implemented_for("undirected") +@nx._dispatchable +def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): + r"""Yield the lowest common ancestor for sets of pairs in a tree. + + Parameters + ---------- + G : NetworkX directed graph (must be a tree) + + root : node, optional (default: None) + The root of the subtree to operate on. + If None, assume the entire graph has exactly one source and use that. + + pairs : iterable or iterator of pairs of nodes, optional (default: None) + The pairs of interest. If None, Defaults to all pairs of nodes + under `root` that have a lowest common ancestor. 
+ + Returns + ------- + lcas : generator of tuples `((u, v), lca)` where `u` and `v` are nodes + in `pairs` and `lca` is their lowest common ancestor. + + Examples + -------- + >>> import pprint + >>> G = nx.DiGraph([(1, 3), (2, 4), (1, 2)]) + >>> pprint.pprint(dict(nx.tree_all_pairs_lowest_common_ancestor(G))) + {(1, 1): 1, + (2, 1): 1, + (2, 2): 2, + (3, 1): 1, + (3, 2): 1, + (3, 3): 3, + (3, 4): 1, + (4, 1): 1, + (4, 2): 2, + (4, 4): 4} + + We can also use `pairs` argument to specify the pairs of nodes for which we + want to compute lowest common ancestors. Here is an example: + + >>> dict(nx.tree_all_pairs_lowest_common_ancestor(G, pairs=[(1, 4), (2, 3)])) + {(2, 3): 1, (1, 4): 1} + + Notes + ----- + Only defined on non-null trees represented with directed edges from + parents to children. Uses Tarjan's off-line lowest-common-ancestors + algorithm. Runs in time $O(4 \times (V + E + P))$ time, where 4 is the largest + value of the inverse Ackermann function likely to ever come up in actual + use, and $P$ is the number of pairs requested (or $V^2$ if all are needed). + + Tarjan, R. E. (1979), "Applications of path compression on balanced trees", + Journal of the ACM 26 (4): 690-715, doi:10.1145/322154.322161. + + See Also + -------- + all_pairs_lowest_common_ancestor: similar routine for general DAGs + lowest_common_ancestor: just a single pair for general DAGs + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.") + + # Index pairs of interest for efficient lookup from either side. + if pairs is not None: + pair_dict = defaultdict(set) + # See note on all_pairs_lowest_common_ancestor. + if not isinstance(pairs, Mapping | Set): + pairs = set(pairs) + for u, v in pairs: + for n in (u, v): + if n not in G: + msg = f"The node {str(n)} is not in the digraph." + raise nx.NodeNotFound(msg) + pair_dict[u].add(v) + pair_dict[v].add(u) + + # If root is not specified, find the exactly one node with in degree 0 and + # use it. Raise an error if none are found, or more than one is. Also check + # for any nodes with in degree larger than 1, which would imply G is not a + # tree. + if root is None: + for n, deg in G.in_degree: + if deg == 0: + if root is not None: + msg = "No root specified and tree has multiple sources." + raise nx.NetworkXError(msg) + root = n + # checking deg>1 is not sufficient for MultiDiGraphs + elif deg > 1 and len(G.pred[n]) > 1: + msg = "Tree LCA only defined on trees; use DAG routine." + raise nx.NetworkXError(msg) + if root is None: + raise nx.NetworkXError("Graph contains a cycle.") + + # Iterative implementation of Tarjan's offline lca algorithm + # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition) + uf = UnionFind() + ancestors = {} + for node in G: + ancestors[node] = uf[node] + + colors = defaultdict(bool) + for node in nx.dfs_postorder_nodes(G, root): + colors[node] = True + for v in pair_dict[node] if pairs is not None else G: + if colors[v]: + # If the user requested both directions of a pair, give it. + # Otherwise, just give one. 
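+                # v is colored, so its subtree has been fully processed and
+                # the union-find representative of v is the deepest ancestor
+                # of v whose DFS call is still open; by Tarjan's invariant,
+                # that ancestor is the lowest common ancestor of node and v.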
+ if pairs is not None and (node, v) in pairs: + yield (node, v), ancestors[uf[v]] + if pairs is None or (v, node) in pairs: + yield (v, node), ancestors[uf[v]] + if node != root: + parent = arbitrary_element(G.pred[node]) + uf.union(parent, node) + ancestors[uf[parent]] = parent diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/mis.py b/venv/lib/python3.10/site-packages/networkx/algorithms/mis.py new file mode 100644 index 0000000000000000000000000000000000000000..fc70514d9ea03623592f7c48b910f70d2111cd51 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/mis.py @@ -0,0 +1,77 @@ +""" +Algorithm to find a maximal (not maximum) independent set. + +""" +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["maximal_independent_set"] + + +@not_implemented_for("directed") +@py_random_state(2) +@nx._dispatchable +def maximal_independent_set(G, nodes=None, seed=None): + """Returns a random maximal independent set guaranteed to contain + a given set of nodes. + + An independent set is a set of nodes such that the subgraph + of G induced by these nodes contains no edges. A maximal + independent set is an independent set such that it is not possible + to add a new node and still get an independent set. + + Parameters + ---------- + G : NetworkX graph + + nodes : list or iterable + Nodes that must be part of the independent set. This set of nodes + must be independent. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + indep_nodes : list + List of nodes that are part of a maximal independent set. + + Raises + ------ + NetworkXUnfeasible + If the nodes in the provided list are not part of the graph or + do not form an independent set, an exception is raised. + + NetworkXNotImplemented + If `G` is directed. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.maximal_independent_set(G) # doctest: +SKIP + [4, 0, 2] + >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP + [1, 3] + + Notes + ----- + This algorithm does not solve the maximum independent set problem. + + """ + if not nodes: + nodes = {seed.choice(list(G))} + else: + nodes = set(nodes) + if not nodes.issubset(G): + raise nx.NetworkXUnfeasible(f"{nodes} is not a subset of the nodes of G") + neighbors = set.union(*[set(G.adj[v]) for v in nodes]) + if set.intersection(neighbors, nodes): + raise nx.NetworkXUnfeasible(f"{nodes} is not an independent set of G") + indep_nodes = list(nodes) + available_nodes = set(G.nodes()).difference(neighbors.union(nodes)) + while available_nodes: + node = seed.choice(list(available_nodes)) + indep_nodes.append(node) + available_nodes.difference_update(list(G.adj[node]) + [node]) + return indep_nodes diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/moral.py b/venv/lib/python3.10/site-packages/networkx/algorithms/moral.py new file mode 100644 index 0000000000000000000000000000000000000000..e2acf80f6c3715da57dfc92e4c2d2daf986b3c29 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/moral.py @@ -0,0 +1,59 @@ +r"""Function for computing the moral graph of a directed graph.""" + +import itertools + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["moral_graph"] + + +@not_implemented_for("undirected") +@nx._dispatchable(returns_graph=True) +def moral_graph(G): + r"""Return the Moral Graph + + Returns the moralized graph of a given directed graph. 
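+    The construction "marries" the parents: every pair of predecessors of a
+    common child is joined by an edge, and all directed edges are then made
+    undirected.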
+ + Parameters + ---------- + G : NetworkX graph + Directed graph + + Returns + ------- + H : NetworkX graph + The undirected moralized graph of G + + Raises + ------ + NetworkXNotImplemented + If `G` is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (2, 3), (2, 5), (3, 4), (4, 3)]) + >>> G_moral = nx.moral_graph(G) + >>> G_moral.edges() + EdgeView([(1, 2), (2, 3), (2, 5), (2, 4), (3, 4)]) + + Notes + ----- + A moral graph is an undirected graph H = (V, E) generated from a + directed Graph, where if a node has more than one parent node, edges + between these parent nodes are inserted and all directed edges become + undirected. + + https://en.wikipedia.org/wiki/Moral_graph + + References + ---------- + .. [1] Wray L. Buntine. 1995. Chain graphs for learning. + In Proceedings of the Eleventh conference on Uncertainty + in artificial intelligence (UAI'95) + """ + H = G.to_undirected() + for preds in G.pred.values(): + predecessors_combinations = itertools.combinations(preds, r=2) + H.add_edges_from(predecessors_combinations) + return H diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/planar_drawing.py b/venv/lib/python3.10/site-packages/networkx/algorithms/planar_drawing.py new file mode 100644 index 0000000000000000000000000000000000000000..ea25809b6aeb198b23b44fe9878775d11b7e109c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/planar_drawing.py @@ -0,0 +1,464 @@ +from collections import defaultdict + +import networkx as nx + +__all__ = ["combinatorial_embedding_to_pos"] + + +def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): + """Assigns every node a (x, y) position based on the given embedding + + The algorithm iteratively inserts nodes of the input graph in a certain + order and rearranges previously inserted nodes so that the planar drawing + stays valid. This is done efficiently by only maintaining relative + positions during the node placements and calculating the absolute positions + at the end. For more information see [1]_. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + This defines the order of the edges + + fully_triangulate : bool + If set to True the algorithm adds edges to a copy of the input + embedding and makes it chordal. + + Returns + ------- + pos : dict + Maps each node to a tuple that defines the (x, y) position + + References + ---------- + .. [1] M. Chrobak and T.H. Payne: + A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677 + + """ + if len(embedding.nodes()) < 4: + # Position the node in any triangle + default_positions = [(0, 0), (2, 0), (1, 1)] + pos = {} + for i, v in enumerate(embedding.nodes()): + pos[v] = default_positions[i] + return pos + + embedding, outer_face = triangulate_embedding(embedding, fully_triangulate) + + # The following dicts map a node to another node + # If a node is not in the key set it means that the node is not yet in G_k + # If a node maps to None then the corresponding subtree does not exist + left_t_child = {} + right_t_child = {} + + # The following dicts map a node to an integer + delta_x = {} + y_coordinate = {} + + node_list = get_canonical_ordering(embedding, outer_face) + + # 1. 
Phase: Compute relative positions + + # Initialization + v1, v2, v3 = node_list[0][0], node_list[1][0], node_list[2][0] + + delta_x[v1] = 0 + y_coordinate[v1] = 0 + right_t_child[v1] = v3 + left_t_child[v1] = None + + delta_x[v2] = 1 + y_coordinate[v2] = 0 + right_t_child[v2] = None + left_t_child[v2] = None + + delta_x[v3] = 1 + y_coordinate[v3] = 1 + right_t_child[v3] = v2 + left_t_child[v3] = None + + for k in range(3, len(node_list)): + vk, contour_nbrs = node_list[k] + wp = contour_nbrs[0] + wp1 = contour_nbrs[1] + wq = contour_nbrs[-1] + wq1 = contour_nbrs[-2] + adds_mult_tri = len(contour_nbrs) > 2 + + # Stretch gaps: + delta_x[wp1] += 1 + delta_x[wq] += 1 + + delta_x_wp_wq = sum(delta_x[x] for x in contour_nbrs[1:]) + + # Adjust offsets + delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + delta_x[wq] = delta_x_wp_wq - delta_x[vk] + if adds_mult_tri: + delta_x[wp1] -= delta_x[vk] + + # Install v_k: + right_t_child[wp] = vk + right_t_child[vk] = wq + if adds_mult_tri: + left_t_child[vk] = wp1 + right_t_child[wq1] = None + else: + left_t_child[vk] = None + + # 2. Phase: Set absolute positions + pos = {} + pos[v1] = (0, y_coordinate[v1]) + remaining_nodes = [v1] + while remaining_nodes: + parent_node = remaining_nodes.pop() + + # Calculate position for left child + set_position( + parent_node, left_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + # Calculate position for right child + set_position( + parent_node, right_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + return pos + + +def set_position(parent, tree, remaining_nodes, delta_x, y_coordinate, pos): + """Helper method to calculate the absolute position of nodes.""" + child = tree[parent] + parent_node_x = pos[parent][0] + if child is not None: + # Calculate pos of child + child_x = parent_node_x + delta_x[child] + pos[child] = (child_x, y_coordinate[child]) + # Remember to calculate pos of its children + remaining_nodes.append(child) + + +def get_canonical_ordering(embedding, outer_face): + """Returns a canonical ordering of the nodes + + The canonical ordering of nodes (v1, ..., vn) must fulfill the following + conditions: + (See Lemma 1 in [2]_) + + - For the subgraph G_k of the input graph induced by v1, ..., vk it holds: + - 2-connected + - internally triangulated + - the edge (v1, v2) is part of the outer face + - For a node v(k+1) the following holds: + - The node v(k+1) is part of the outer face of G_k + - It has at least two neighbors in G_k + - All neighbors of v(k+1) in G_k lie consecutively on the outer face of + G_k (excluding the edge (v1, v2)). + + The algorithm used here starts with G_n (containing all nodes). It first + selects the nodes v1 and v2. And then tries to find the order of the other + nodes by checking which node can be removed in order to fulfill the + conditions mentioned above. This is done by calculating the number of + chords of nodes on the outer face. For more information see [1]_. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + The embedding must be triangulated + outer_face : list + The nodes on the outer face of the graph + + Returns + ------- + ordering : list + A list of tuples `(vk, wp_wq)`. Here `vk` is the node at this position + in the canonical ordering. The element `wp_wq` is a list of nodes that + make up the outer face of G_k. + + References + ---------- + .. [1] Steven Chaplick. 
+ Canonical Orders of Planar Graphs and (some of) Their Applications 2015 + https://wuecampus2.uni-wuerzburg.de/moodle/pluginfile.php/545727/mod_resource/content/0/vg-ss15-vl03-canonical-orders-druckversion.pdf + .. [2] M. Chrobak and T.H. Payne: + A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677 + + """ + v1 = outer_face[0] + v2 = outer_face[1] + chords = defaultdict(int) # Maps nodes to the number of their chords + marked_nodes = set() + ready_to_pick = set(outer_face) + + # Initialize outer_face_ccw_nbr (do not include v1 -> v2) + outer_face_ccw_nbr = {} + prev_nbr = v2 + for idx in range(2, len(outer_face)): + outer_face_ccw_nbr[prev_nbr] = outer_face[idx] + prev_nbr = outer_face[idx] + outer_face_ccw_nbr[prev_nbr] = v1 + + # Initialize outer_face_cw_nbr (do not include v2 -> v1) + outer_face_cw_nbr = {} + prev_nbr = v1 + for idx in range(len(outer_face) - 1, 0, -1): + outer_face_cw_nbr[prev_nbr] = outer_face[idx] + prev_nbr = outer_face[idx] + + def is_outer_face_nbr(x, y): + if x not in outer_face_ccw_nbr: + return outer_face_cw_nbr[x] == y + if x not in outer_face_cw_nbr: + return outer_face_ccw_nbr[x] == y + return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y + + def is_on_outer_face(x): + return x not in marked_nodes and (x in outer_face_ccw_nbr or x == v1) + + # Initialize number of chords + for v in outer_face: + for nbr in embedding.neighbors_cw_order(v): + if is_on_outer_face(nbr) and not is_outer_face_nbr(v, nbr): + chords[v] += 1 + ready_to_pick.discard(v) + + # Initialize canonical_ordering + canonical_ordering = [None] * len(embedding.nodes()) + canonical_ordering[0] = (v1, []) + canonical_ordering[1] = (v2, []) + ready_to_pick.discard(v1) + ready_to_pick.discard(v2) + + for k in range(len(embedding.nodes()) - 1, 1, -1): + # 1. 
Pick v from ready_to_pick + v = ready_to_pick.pop() + marked_nodes.add(v) + + # v has exactly two neighbors on the outer face (wp and wq) + wp = None + wq = None + # Iterate over neighbors of v to find wp and wq + nbr_iterator = iter(embedding.neighbors_cw_order(v)) + while True: + nbr = next(nbr_iterator) + if nbr in marked_nodes: + # Only consider nodes that are not yet removed + continue + if is_on_outer_face(nbr): + # nbr is either wp or wq + if nbr == v1: + wp = v1 + elif nbr == v2: + wq = v2 + else: + if outer_face_cw_nbr[nbr] == v: + # nbr is wp + wp = nbr + else: + # nbr is wq + wq = nbr + if wp is not None and wq is not None: + # We don't need to iterate any further + break + + # Obtain new nodes on outer face (neighbors of v from wp to wq) + wp_wq = [wp] + nbr = wp + while nbr != wq: + # Get next neighbor (clockwise on the outer face) + next_nbr = embedding[v][nbr]["ccw"] + wp_wq.append(next_nbr) + # Update outer face + outer_face_cw_nbr[nbr] = next_nbr + outer_face_ccw_nbr[next_nbr] = nbr + # Move to next neighbor of v + nbr = next_nbr + + if len(wp_wq) == 2: + # There was a chord between wp and wq, decrease number of chords + chords[wp] -= 1 + if chords[wp] == 0: + ready_to_pick.add(wp) + chords[wq] -= 1 + if chords[wq] == 0: + ready_to_pick.add(wq) + else: + # Update all chords involving w_(p+1) to w_(q-1) + new_face_nodes = set(wp_wq[1:-1]) + for w in new_face_nodes: + # If we do not find a chord for w later we can pick it next + ready_to_pick.add(w) + for nbr in embedding.neighbors_cw_order(w): + if is_on_outer_face(nbr) and not is_outer_face_nbr(w, nbr): + # There is a chord involving w + chords[w] += 1 + ready_to_pick.discard(w) + if nbr not in new_face_nodes: + # Also increase chord for the neighbor + # We only iterator over new_face_nodes + chords[nbr] += 1 + ready_to_pick.discard(nbr) + # Set the canonical ordering node and the list of contour neighbors + canonical_ordering[k] = (v, wp_wq) + + return canonical_ordering + + +def triangulate_face(embedding, v1, v2): + """Triangulates the face given by half edge (v, w) + + Parameters + ---------- + embedding : nx.PlanarEmbedding + v1 : node + The half-edge (v1, v2) belongs to the face that gets triangulated + v2 : node + """ + _, v3 = embedding.next_face_half_edge(v1, v2) + _, v4 = embedding.next_face_half_edge(v2, v3) + if v1 in (v2, v3): + # The component has less than 3 nodes + return + while v1 != v4: + # Add edge if not already present on other side + if embedding.has_edge(v1, v3): + # Cannot triangulate at this position + v1, v2, v3 = v2, v3, v4 + else: + # Add edge for triangulation + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) + v1, v2, v3 = v1, v3, v4 + # Get next node + _, v4 = embedding.next_face_half_edge(v2, v3) + + +def triangulate_embedding(embedding, fully_triangulate=True): + """Triangulates the embedding. + + Traverses faces of the embedding and adds edges to a copy of the + embedding to triangulate it. + The method also ensures that the resulting graph is 2-connected by adding + edges if the same vertex is contained twice on a path around a face. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + The input graph must contain at least 3 nodes. + + fully_triangulate : bool + If set to False the face with the most nodes is chooses as outer face. + This outer face does not get triangulated. 
+ + Returns + ------- + (embedding, outer_face) : (nx.PlanarEmbedding, list) tuple + The element `embedding` is a new embedding containing all edges from + the input embedding and the additional edges to triangulate the graph. + The element `outer_face` is a list of nodes that lie on the outer face. + If the graph is fully triangulated these are three arbitrary connected + nodes. + + """ + if len(embedding.nodes) <= 1: + return embedding, list(embedding.nodes) + embedding = nx.PlanarEmbedding(embedding) + + # Get a list with a node for each connected component + component_nodes = [next(iter(x)) for x in nx.connected_components(embedding)] + + # 1. Make graph a single component (add edge between components) + for i in range(len(component_nodes) - 1): + v1 = component_nodes[i] + v2 = component_nodes[i + 1] + embedding.connect_components(v1, v2) + + # 2. Calculate faces, ensure 2-connectedness and determine outer face + outer_face = [] # A face with the most number of nodes + face_list = [] + edges_visited = set() # Used to keep track of already visited faces + for v in embedding.nodes(): + for w in embedding.neighbors_cw_order(v): + new_face = make_bi_connected(embedding, v, w, edges_visited) + if new_face: + # Found a new face + face_list.append(new_face) + if len(new_face) > len(outer_face): + # The face is a candidate to be the outer face + outer_face = new_face + + # 3. Triangulate (internal) faces + for face in face_list: + if face is not outer_face or fully_triangulate: + # Triangulate this face + triangulate_face(embedding, face[0], face[1]) + + if fully_triangulate: + v1 = outer_face[0] + v2 = outer_face[1] + v3 = embedding[v2][v1]["ccw"] + outer_face = [v1, v2, v3] + + return embedding, outer_face + + +def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted): + """Triangulate a face and make it 2-connected + + This method also adds all edges on the face to `edges_counted`. 
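+    Whenever the walk around the face visits a node a second time, an extra
+    pair of half-edges is inserted so that the face boundary stays
+    2-connected.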
+ + Parameters + ---------- + embedding: nx.PlanarEmbedding + The embedding that defines the faces + starting_node : node + A node on the face + outgoing_node : node + A node such that the half edge (starting_node, outgoing_node) belongs + to the face + edges_counted: set + Set of all half-edges that belong to a face that have been visited + + Returns + ------- + face_nodes: list + A list of all nodes at the border of this face + """ + + # Check if the face has already been calculated + if (starting_node, outgoing_node) in edges_counted: + # This face was already counted + return [] + edges_counted.add((starting_node, outgoing_node)) + + # Add all edges to edges_counted which have this face to their left + v1 = starting_node + v2 = outgoing_node + face_list = [starting_node] # List of nodes around the face + face_set = set(face_list) # Set for faster queries + _, v3 = embedding.next_face_half_edge(v1, v2) + + # Move the nodes v1, v2, v3 around the face: + while v2 != starting_node or v3 != outgoing_node: + if v1 == v2: + raise nx.NetworkXException("Invalid half-edge") + # cycle is not completed yet + if v2 in face_set: + # v2 encountered twice: Add edge to ensure 2-connectedness + embedding.add_half_edge(v1, v3, ccw=v2) + embedding.add_half_edge(v3, v1, cw=v2) + edges_counted.add((v2, v3)) + edges_counted.add((v3, v1)) + v2 = v1 + else: + face_set.add(v2) + face_list.append(v2) + + # set next edge + v1 = v2 + v2, v3 = embedding.next_face_half_edge(v2, v3) + + # remember that this edge has been counted + edges_counted.add((v1, v2)) + + return face_list diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/richclub.py b/venv/lib/python3.10/site-packages/networkx/algorithms/richclub.py new file mode 100644 index 0000000000000000000000000000000000000000..445b27d142547e5cad04e00abc9ca33d45edbee6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/algorithms/richclub.py @@ -0,0 +1,138 @@ +"""Functions for computing rich-club coefficients.""" + +from itertools import accumulate + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["rich_club_coefficient"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def rich_club_coefficient(G, normalized=True, Q=100, seed=None): + r"""Returns the rich-club coefficient of the graph `G`. + + For each degree *k*, the *rich-club coefficient* is the ratio of the + number of actual to the number of potential edges for nodes with + degree greater than *k*: + + .. math:: + + \phi(k) = \frac{2 E_k}{N_k (N_k - 1)} + + where `N_k` is the number of nodes with degree larger than *k*, and + `E_k` is the number of edges among those nodes. + + Parameters + ---------- + G : NetworkX graph + Undirected graph with neither parallel edges nor self-loops. + normalized : bool (optional) + Normalize using randomized network as in [1]_ + Q : float (optional, default=100) + If `normalized` is True, perform `Q * m` double-edge + swaps, where `m` is the number of edges in `G`, to use as a + null-model for normalization. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + rc : dictionary + A dictionary, keyed by degree, with rich-club coefficient values. + + Raises + ------ + NetworkXError + If `G` has fewer than four nodes and ``normalized=True``. + A randomly sampled graph for normalization cannot be generated in this case. 
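+    Exception
+        If `G` contains self loops; the rich-club coefficient is not
+        implemented for such graphs.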
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> rc = nx.rich_club_coefficient(G, normalized=False, seed=42)
+    >>> rc[0]
+    0.4
+
+    Notes
+    -----
+    The rich-club definition and algorithm are found in [1]_. This
+    algorithm ignores any edge weights and is not defined for directed
+    graphs or graphs with parallel edges or self loops.
+
+    Normalization is done by computing the rich-club coefficient for a randomly
+    sampled graph with the same degree distribution as `G`, obtained by
+    repeatedly swapping the endpoints of existing edges. For graphs with fewer
+    than 4 nodes, it is not possible to generate a random graph with a prescribed
+    degree distribution, as the degree distribution fully determines the graph
+    (hence making the coefficients trivially normalized to 1).
+    This function raises an exception in this case.
+
+    Estimates for appropriate values of `Q` are found in [2]_.
+
+    References
+    ----------
+    .. [1] Julian J. McAuley, Luciano da Fontoura Costa,
+       and Tibério S. Caetano,
+       "The rich-club phenomenon across complex network hierarchies",
+       Applied Physics Letters Vol 91 Issue 8, August 2007.
+       https://arxiv.org/abs/physics/0701290
+    .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon,
+       "Uniform generation of random graphs with arbitrary degree
+       sequences", 2006. https://arxiv.org/abs/cond-mat/0312028
+    """
+    if nx.number_of_selfloops(G) > 0:
+        raise nx.NetworkXError(
+            "rich_club_coefficient is not implemented for graphs with self loops."
+        )
+    rc = _compute_rc(G)
+    if normalized:
+        # make R a copy of G, randomize it with Q*|E| double edge swaps
+        # and use the rich-club coefficient of R to normalize
+        R = G.copy()
+        E = R.number_of_edges()
+        nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10, seed=seed)
+        rcran = _compute_rc(R)
+        rc = {k: v / rcran[k] for k, v in rc.items()}
+    return rc
+
+
+def _compute_rc(G):
+    """Returns the rich-club coefficient for each degree in the graph
+    `G`.
+
+    `G` is an undirected graph without multiedges.
+
+    Returns a dictionary mapping degree to rich-club coefficient for
+    that degree.
+
+    """
+    deghist = nx.degree_histogram(G)
+    total = sum(deghist)
+    # Compute the number of nodes with degree greater than `k`, for each
+    # degree `k`, keeping only degrees for which at least two such nodes
+    # exist (the coefficient is undefined otherwise).
+    nks = (total - cs for cs in accumulate(deghist) if total - cs > 1)
+    # Create a sorted list of pairs of edge endpoint degrees.
+    #
+    # The list is sorted in reverse order so that we can pop from the
+    # right side of the list later, instead of popping from the left
+    # side of the list, which would have a linear time cost.
+    edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True)
+    ek = G.number_of_edges()
+    if ek == 0:
+        return {}
+
+    k1, k2 = edge_degrees.pop()
+    rc = {}
+    for d, nk in enumerate(nks):
+        while k1 <= d:
+            if len(edge_degrees) == 0:
+                ek = 0
+                break
+            k1, k2 = edge_degrees.pop()
+            ek -= 1
+        rc[d] = 2 * ek / (nk * (nk - 1))
+    return rc
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/smetric.py b/venv/lib/python3.10/site-packages/networkx/algorithms/smetric.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a27014ee55405e1a6412fcdad5ef494e9d73635
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/smetric.py
@@ -0,0 +1,60 @@
+import networkx as nx
+
+__all__ = ["s_metric"]
+
+
+@nx._dispatchable
+def s_metric(G, **kwargs):
+    """Returns the s-metric [1]_ of the graph.
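+
+    For instance (a sketch; ``nx.path_graph(3)`` has degree sequence
+    1, 2, 1, so its s-metric is ``1 * 2 + 2 * 1 = 4``):
+
+    >>> nx.s_metric(nx.path_graph(3))
+    4.0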
+
+    The s-metric is defined as the sum of the products ``deg(u) * deg(v)``
+    for every edge ``(u, v)`` in `G`.
+
+    Parameters
+    ----------
+    G : graph
+        The graph used to compute the s-metric.
+    normalized : bool (optional)
+        Normalize the value.
+
+        .. deprecated:: 3.2
+
+           The `normalized` keyword argument is deprecated and will be removed
+           in the future.
+
+    Returns
+    -------
+    s : float
+        The s-metric of the graph.
+
+    References
+    ----------
+    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
+           Towards a Theory of Scale-Free Graphs:
+           Definition, Properties, and Implications (Extended Version), 2005.
+           https://arxiv.org/abs/cond-mat/0501169
+    """
+    # NOTE: This entire code block + the **kwargs in the signature can all be
+    # removed when the deprecation expires.
+    # Normalized is always False, since all `normalized=True` did was raise
+    # a NotImplementedError
+    if kwargs:
+        # Warn for `normalized`, raise for any other kwarg
+        if "normalized" in kwargs:
+            import warnings
+
+            warnings.warn(
+                "\n\nThe `normalized` keyword is deprecated and will be removed\n"
+                "in the future. To silence this warning, remove `normalized`\n"
+                "when calling `s_metric`.\n\n"
+                "The value of `normalized` is ignored.",
+                DeprecationWarning,
+                stacklevel=3,
+            )
+        else:
+            # Typical raising behavior for Python when a kwarg is not recognized
+            raise TypeError(
+                f"s_metric got an unexpected keyword argument '{list(kwargs.keys())[0]}'"
+            )
+
+    return float(sum(G.degree(u) * G.degree(v) for (u, v) in G.edges()))
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/time_dependent.py b/venv/lib/python3.10/site-packages/networkx/algorithms/time_dependent.py
new file mode 100644
index 0000000000000000000000000000000000000000..d67cdcf0b8eaecdef8497c77edd3144e96501173
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/time_dependent.py
@@ -0,0 +1,142 @@
+"""Time dependent algorithms."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["cd_index"]
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(node_attrs={"time": None, "weight": 1})
+def cd_index(G, node, time_delta, *, time="time", weight=None):
+    r"""Compute the CD index for `node` within the graph `G`.
+
+    Calculates the CD index for the given node of the graph,
+    considering only its predecessors whose `time` attribute is less than
+    or equal to the `time` attribute of `node` plus `time_delta`.
+
+    Parameters
+    ----------
+    G : graph
+        A directed networkx graph whose nodes have `time` attributes and optionally
+        `weight` attributes (if a weight is not given, it is considered 1).
+    node : node
+        The node for which the CD index is calculated.
+    time_delta : numeric or timedelta
+        Amount of time after the `time` attribute of the `node`. The value of
+        `time_delta` must support comparison with the `time` node attribute. For
+        example, if the `time` attribute of the nodes are `datetime.datetime`
+        objects, then `time_delta` should be a `datetime.timedelta` object.
+    time : string (Optional, default is "time")
+        The name of the node attribute that will be used for the calculations.
+    weight : string (Optional, default is None)
+        The name of the node attribute used as weight.
+
+    Returns
+    -------
+    float
+        The CD index calculated for the node `node` within the graph `G`.
+
+    Raises
+    ------
+    NetworkXError
+        If not all nodes have a `time` attribute, if the `time_delta` and
+        `time` attribute types are not compatible, or if `n` equals 0.
+
+    NetworkXNotImplemented
+        If `G` is a non-directed graph or a multigraph.
+
+    Examples
+    --------
+    >>> from datetime import datetime, timedelta
+    >>> G = nx.DiGraph()
+    >>> nodes = {
+    ...     1: {"time": datetime(2015, 1, 1)},
+    ...     2: {"time": datetime(2012, 1, 1), "weight": 4},
+    ...     3: {"time": datetime(2010, 1, 1)},
+    ...     4: {"time": datetime(2008, 1, 1)},
+    ...     5: {"time": datetime(2014, 1, 1)},
+    ... }
+    >>> G.add_nodes_from([(n, nodes[n]) for n in nodes])
+    >>> edges = [(1, 3), (1, 4), (2, 3), (3, 4), (3, 5)]
+    >>> G.add_edges_from(edges)
+    >>> delta = timedelta(days=5 * 365)
+    >>> nx.cd_index(G, 3, time_delta=delta, time="time")
+    0.5
+    >>> nx.cd_index(G, 3, time_delta=delta, time="time", weight="weight")
+    0.12
+
+    Integers can also be used for the time values:
+
+    >>> node_times = {1: 2015, 2: 2012, 3: 2010, 4: 2008, 5: 2014}
+    >>> nx.set_node_attributes(G, node_times, "new_time")
+    >>> nx.cd_index(G, 3, time_delta=4, time="new_time")
+    0.5
+    >>> nx.cd_index(G, 3, time_delta=4, time="new_time", weight="weight")
+    0.12
+
+    Notes
+    -----
+    This method implements the algorithm for calculating the CD index,
+    as described in the paper by Funk and Owen-Smith [1]_. The CD index
+    is used to measure how consolidating or destabilizing a patent is,
+    hence the nodes of the graph represent patents and the edges show
+    the citations between these patents. The mathematical model is given
+    below:
+
+    .. math::
+        CD_{t}=\frac{1}{n_{t}}\sum_{i=1}^{n}\frac{-2f_{it}b_{it}+f_{it}}{w_{it}},
+
+    where `f_{it}` equals 1 if `i` cites the focal patent else 0, `b_{it}` equals
+    1 if `i` cites any of the focal patent's successors else 0, `n_{t}` is the
+    number of patents citing the focal patent or any of its successors, and
+    `w_{it}` is a weight for patent `i` at time `t`.
+
+    The `datetime.timedelta` class can lead to off-by-one issues when converting
+    from years to days. In the example above `timedelta(days=5 * 365)` looks like
+    5 years, but it isn't because of leap year days. So it gives the same result
+    as `timedelta(days=4 * 365)`. But using `timedelta(days=5 * 365 + 1)` gives
+    a 5-year delta **for this choice of years** but may not if the 5-year gap has
+    more than 1 leap year. To avoid these issues, use integers to represent years,
+    or be very careful when you convert units of time.
+
+    References
+    ----------
+    .. [1] Funk, Russell J., and Jason Owen-Smith.
+       "A dynamic network measure of technological change."
+       Management Science 63, no. 3 (2017): 791-817.
+       http://russellfunk.org/cdindex/static/papers/funk_ms_2017.pdf
+
+    """
+    if not all(time in G.nodes[n] for n in G):
+        raise nx.NetworkXError("Not all nodes have a 'time' attribute.")
+
+    try:
+        # get target_date
+        target_date = G.nodes[node][time] + time_delta
+        # keep the predecessors that existed before the target date
+        pred = {i for i in G.pred[node] if G.nodes[i][time] <= target_date}
+    except Exception as err:
+        raise nx.NetworkXError(
+            "Addition and comparison are not supported between 'time_delta' "
+            "and 'time' types."
+        ) from err
+
+    # For each predecessor i: -1 if i cites any of the node's successors, else 1
+    b = [-1 if any(j in G[i] for j in G[node]) else 1 for i in pred]
+
+    # n is the size of the union of the focal node's predecessors and its
+    # successors' predecessors
+    n = len(pred.union(*(G.pred[s].keys() - {node} for s in G[node])))
+    if n == 0:
+        raise nx.NetworkXError("The cd index cannot be defined.")
+
+    # calculate cd index
+    if weight is None:
+        return round(sum(bi for bi in b) / n, 2)
+    else:
+        # If a node has the specified weight attribute, its weight is used in
+        # the calculation; otherwise, a weight of 1 is assumed for that node
+        weights = [G.nodes[i].get(weight, 1) for i in pred]
+        return round(sum(bi / wt for bi, wt in zip(b, weights)) / n, 2)
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/triads.py b/venv/lib/python3.10/site-packages/networkx/algorithms/triads.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e67c145362bb14b9cc35770232d0bf1f97a611a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/triads.py
@@ -0,0 +1,604 @@
+# See https://github.com/networkx/networkx/pull/1474
+# Copyright 2011 Reya Group
+# Copyright 2011 Alex Levenson
+# Copyright 2011 Diederik van Liere
+"""Functions for analyzing triads of a graph."""
+
+from collections import defaultdict
+from itertools import combinations, permutations
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = [
+    "triadic_census",
+    "is_triad",
+    "all_triplets",
+    "all_triads",
+    "triads_by_type",
+    "triad_type",
+    "random_triad",
+]
+
+#: The integer codes representing each type of triad.
+#:
+#: Triads that are the same up to symmetry have the same code.
+TRICODES = (
+    1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11, 2, 6, 4, 8, 5, 9, 9, 13,
+    6, 10, 9, 14, 7, 14, 12, 15, 2, 5, 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13,
+    14, 15, 3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, 11, 15, 15, 16,
+)
+
+#: The names of each type of triad. The order of the elements is
+#: important: it corresponds to the tricodes given in :data:`TRICODES`.
+TRIAD_NAMES = (
+    "003", "012", "102", "021D", "021U", "021C", "111D", "111U",
+    "030T", "030C", "201", "120D", "120U", "120C", "210", "300",
+)
+
+
+#: A dictionary mapping triad code to triad name.
+TRICODE_TO_NAME = {i: TRIAD_NAMES[code - 1] for i, code in enumerate(TRICODES)}
+
+
+def _tricode(G, v, u, w):
+    """Returns the integer code of the given triad.
+
+    This is some fancy magic that comes from Batagelj and Mrvar's paper. It
+    treats each edge joining a pair of `v`, `u`, and `w` as a bit in
+    the binary representation of an integer.
+
+    """
+    combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
+    return sum(x for u, v, x in combos if v in G[u])
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def triadic_census(G, nodelist=None):
+    """Determines the triadic census of a directed graph.
+
+    The triadic census is a count of how many of the 16 possible types of
+    triads are present in a directed graph. If a list of nodes is passed,
+    only triads containing at least one node from `nodelist` are counted.
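+
+    For instance, restricting the census of a directed 3-cycle to triads
+    containing node 1 (a sketch; every other triad type is zero here):
+
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.triadic_census(G, nodelist=[1])["030C"]
+    1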
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph
+    nodelist : list
+        List of nodes for which you want to calculate triadic census
+
+    Returns
+    -------
+    census : dict
+        Dictionary with triad types as keys and numbers of occurrences as values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
+    >>> triadic_census = nx.triadic_census(G)
+    >>> for key, value in triadic_census.items():
+    ...     print(f"{key}: {value}")
+    003: 0
+    012: 0
+    102: 0
+    021D: 0
+    021U: 0
+    021C: 0
+    111D: 0
+    111U: 0
+    030T: 2
+    030C: 2
+    201: 0
+    120D: 0
+    120U: 0
+    120C: 0
+    210: 0
+    300: 0
+
+    Notes
+    -----
+    This algorithm has complexity $O(m)$ where $m$ is the number of edges in
+    the graph.
+
+    For undirected graphs, the triadic census can be computed by first converting
+    the graph into a directed graph using the ``G.to_directed()`` method.
+    After this conversion, only the triad types 003, 102, 201 and 300 will be
+    present in the undirected scenario.
+
+    Raises
+    ------
+    ValueError
+        If `nodelist` contains duplicate nodes or nodes not in `G`.
+        If you want to ignore this, you can preprocess with
+        ``set(nodelist) & G.nodes``.
+
+    See Also
+    --------
+    triad_graph
+
+    References
+    ----------
+    .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census
+        algorithm for large sparse networks with small maximum degree,
+        University of Ljubljana,
+        http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
+
+    """
+    nodeset = set(G.nbunch_iter(nodelist))
+    if nodelist is not None and len(nodelist) != len(nodeset):
+        raise ValueError("nodelist includes duplicate nodes or nodes not in G")
+
+    N = len(G)
+    Nnot = N - len(nodeset)  # can signal special counting for a subset of nodes
+
+    # create an ordering of nodes with nodeset nodes first
+    m = {n: i for i, n in enumerate(nodeset)}
+    if Nnot:
+        # add non-nodeset nodes later in the ordering
+        not_nodeset = G.nodes - nodeset
+        m.update((n, i + N) for i, n in enumerate(not_nodeset))
+
+    # build all_neighbor dicts for easy counting
+    # After Python 3.8 can leave off these keys(). Speedup also using G._pred
+    # nbrs = {n: G._pred[n].keys() | G._succ[n].keys() for n in G}
+    nbrs = {n: G.pred[n].keys() | G.succ[n].keys() for n in G}
+    dbl_nbrs = {n: G.pred[n].keys() & G.succ[n].keys() for n in G}
+
+    if Nnot:
+        sgl_nbrs = {n: G.pred[n].keys() ^ G.succ[n].keys() for n in not_nodeset}
+        # find number of edges not incident to nodes in nodeset
+        sgl = sum(1 for n in not_nodeset for nbr in sgl_nbrs[n] if nbr not in nodeset)
+        sgl_edges_outside = sgl // 2
+        dbl = sum(1 for n in not_nodeset for nbr in dbl_nbrs[n] if nbr not in nodeset)
+        dbl_edges_outside = dbl // 2
+
+    # Initialize the count for each triad to be zero.
+    census = {name: 0 for name in TRIAD_NAMES}
+    # Main loop over nodes
+    for v in nodeset:
+        vnbrs = nbrs[v]
+        dbl_vnbrs = dbl_nbrs[v]
+        if Nnot:
+            # set up counts of edges attached to v.
+            sgl_unbrs_bdy = sgl_unbrs_out = dbl_unbrs_bdy = dbl_unbrs_out = 0
+        for u in vnbrs:
+            if m[u] <= m[v]:
+                continue
+            unbrs = nbrs[u]
+            neighbors = (vnbrs | unbrs) - {u, v}
+            # Count connected triads.
+            for w in neighbors:
+                if m[u] < m[w] or (m[v] < m[w] < m[u] and v not in nbrs[w]):
+                    code = _tricode(G, v, u, w)
+                    census[TRICODE_TO_NAME[code]] += 1
+
+            # Use a formula for dyadic triads with edge incident to v
+            if u in dbl_vnbrs:
+                census["102"] += N - len(neighbors) - 2
+            else:
+                census["012"] += N - len(neighbors) - 2
+
+            # Count edges attached to v. Subtract later to get triads with v isolated.
+            # _out are (u, unbr) for unbrs outside boundary of nodeset
+            # _bdy are (u, unbr) for unbrs on boundary of nodeset (get double counted)
+            if Nnot and u not in nodeset:
+                sgl_unbrs = sgl_nbrs[u]
+                sgl_unbrs_bdy += len(sgl_unbrs & vnbrs - nodeset)
+                sgl_unbrs_out += len(sgl_unbrs - vnbrs - nodeset)
+                dbl_unbrs = dbl_nbrs[u]
+                dbl_unbrs_bdy += len(dbl_unbrs & vnbrs - nodeset)
+                dbl_unbrs_out += len(dbl_unbrs - vnbrs - nodeset)
+        # if nodeset == G.nodes, skip this because we will find the edge later.
+        if Nnot:
+            # Count edges outside nodeset not connected with v (v isolated triads)
+            census["012"] += sgl_edges_outside - (sgl_unbrs_out + sgl_unbrs_bdy // 2)
+            census["102"] += dbl_edges_outside - (dbl_unbrs_out + dbl_unbrs_bdy // 2)
+
+    # calculate null triads: "003"
+    # null triads = total number of possible triads - all found triads
+    total_triangles = (N * (N - 1) * (N - 2)) // 6
+    triangles_without_nodeset = (Nnot * (Nnot - 1) * (Nnot - 2)) // 6
+    total_census = total_triangles - triangles_without_nodeset
+    census["003"] = total_census - sum(census.values())
+
+    return census
+
+
+@nx._dispatchable
+def is_triad(G):
+    """Returns True if the graph G is a triad, else False.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX Graph
+
+    Returns
+    -------
+    istriad : boolean
+        Whether G is a valid triad
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.is_triad(G)
+    True
+    >>> G.add_edge(0, 1)
+    >>> nx.is_triad(G)
+    False
+    """
+    if isinstance(G, nx.Graph):
+        if G.order() == 3 and nx.is_directed(G):
+            if not any((n, n) in G.edges() for n in G.nodes()):
+                return True
+    return False
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def all_triplets(G):
+    """Returns a generator of all possible sets of 3 nodes in a DiGraph.
+
+    .. deprecated:: 3.3
+
+       all_triplets is deprecated and will be removed in NetworkX version 3.5.
+       Use `itertools.combinations` instead::
+
+           all_triplets = itertools.combinations(G, 3)
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph
+
+    Returns
+    -------
+    triplets : generator of 3-tuples
+        Generator of tuples of 3 nodes
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+    >>> list(nx.all_triplets(G))
+    [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]
+
+    """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\nall_triplets is deprecated and will be removed in v3.5.\n"
+            "Use `itertools.combinations(G, 3)` instead."
+        ),
+        category=DeprecationWarning,
+        stacklevel=4,
+    )
+    triplets = combinations(G.nodes(), 3)
+    return triplets
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(returns_graph=True)
+def all_triads(G):
+    """A generator of all possible triads in G.
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph
+
+    Returns
+    -------
+    all_triads : generator of DiGraphs
+        Generator of triads (order-3 DiGraphs)
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
+    >>> for triad in nx.all_triads(G):
+    ...     print(triad.edges)
+    [(1, 2), (2, 3), (3, 1)]
+    [(1, 2), (4, 1), (4, 2)]
+    [(3, 1), (3, 4), (4, 1)]
+    [(2, 3), (3, 4), (4, 2)]
+
+    """
+    triplets = combinations(G.nodes(), 3)
+    for triplet in triplets:
+        yield G.subgraph(triplet).copy()
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def triads_by_type(G):
+    """Returns a list of all triads for each triad type in a directed graph.
+    There are exactly 16 different types of triads possible.
+    Suppose 1, 2, 3 are three nodes; they will be classified as a particular
+    triad type if their connections are as follows:
+
+    - 003: 1, 2, 3
+    - 012: 1 -> 2, 3
+    - 102: 1 <-> 2, 3
+    - 021D: 1 <- 2 -> 3
+    - 021U: 1 -> 2 <- 3
+    - 021C: 1 -> 2 -> 3
+    - 111D: 1 <-> 2 <- 3
+    - 111U: 1 <-> 2 -> 3
+    - 030T: 1 -> 2 -> 3, 1 -> 3
+    - 030C: 1 <- 2 <- 3, 1 -> 3
+    - 201: 1 <-> 2 <-> 3
+    - 120D: 1 <- 2 -> 3, 1 <-> 3
+    - 120U: 1 -> 2 <- 3, 1 <-> 3
+    - 120C: 1 -> 2 -> 3, 1 <-> 3
+    - 210: 1 -> 2 <-> 3, 1 <-> 3
+    - 300: 1 <-> 2 <-> 3, 1 <-> 3
+
+    Refer to the :doc:`example gallery `
+    for visual examples of the triad types.
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph
+
+    Returns
+    -------
+    tri_by_type : dict
+        Dictionary with triad types as keys and lists of triads as values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
+    >>> tri_by_type = nx.triads_by_type(G)
+    >>> tri_by_type["120C"][0].edges()
+    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
+    >>> tri_by_type["012"][0].edges()
+    OutEdgeView([(1, 2)])
+
+    References
+    ----------
+    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
+        Oxford.
+        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
+    """
+    # num_triads = o * (o - 1) * (o - 2) // 6
+    # if num_triads > TRIAD_LIMIT: print(WARNING)
+    all_tri = all_triads(G)
+    tri_by_type = defaultdict(list)
+    for triad in all_tri:
+        name = triad_type(triad)
+        tri_by_type[name].append(triad)
+    return tri_by_type
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def triad_type(G):
+    """Returns the sociological triad type for a triad.
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph with 3 nodes
+
+    Returns
+    -------
+    triad_type : str
+        A string identifying the triad type
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.triad_type(G)
+    '030C'
+    >>> G.add_edge(1, 3)
+    >>> nx.triad_type(G)
+    '120C'
+
+    Notes
+    -----
+    There can be 6 unique edges in a triad (order-3 DiGraph), so there are
+    2^6 = 64 unique triads given 3 nodes. These 64 triads each display exactly
+    1 of 16 topologies of triads (topologies can be permuted). These topologies
+    are identified by the following notation:
+
+    {m}{a}{n}{type} (for example: 111D, 210, 102)
+
+    Here:
+
+    {m}    = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
+             AND (1,0)
+    {a}    = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
+             is (0,1) BUT NOT (1,0) or vice versa
+    {n}    = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
+             (0,1) NOR (1,0)
+    {type} = a letter (takes U, D, C, T) corresponding to up, down, cyclical
+             and transitive. This is only used for topologies that can have
+             more than one form (e.g., 021D and 021U).
+
+    References
+    ----------
+    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
+        Oxford.
+        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
+    """
+    if not is_triad(G):
+        raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)")
+    num_edges = len(G.edges())
+    if num_edges == 0:
+        return "003"
+    elif num_edges == 1:
+        return "012"
+    elif num_edges == 2:
+        e1, e2 = G.edges()
+        if set(e1) == set(e2):
+            return "102"
+        elif e1[0] == e2[0]:
+            return "021D"
+        elif e1[1] == e2[1]:
+            return "021U"
+        elif e1[1] == e2[0] or e2[1] == e1[0]:
+            return "021C"
+    elif num_edges == 3:
+        for e1, e2, e3 in permutations(G.edges(), 3):
+            if set(e1) == set(e2):
+                if e3[0] in e1:
+                    return "111U"
+                # e3[1] in e1:
+                return "111D"
+            elif set(e1).symmetric_difference(set(e2)) == set(e3):
+                # a directed 3-cycle: every node appears once as a head and
+                # once as a tail
+                if {e1[0], e2[0], e3[0]} == {e1[1], e2[1], e3[1]} == set(G.nodes()):
+                    return "030C"
+                # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
+                return "030T"
+    elif num_edges == 4:
+        for e1, e2, e3, e4 in permutations(G.edges(), 4):
+            if set(e1) == set(e2):
+                # identify pair of symmetric edges (which necessarily exists)
+                if set(e3) == set(e4):
+                    return "201"
+                if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)):
+                    return "120D"
+                if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)):
+                    return "120U"
+                if e3[1] == e4[0]:
+                    return "120C"
+    elif num_edges == 5:
+        return "210"
+    elif num_edges == 6:
+        return "300"
+
+
+@not_implemented_for("undirected")
+@py_random_state(1)
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def random_triad(G, seed=None):
+    """Returns a random triad from a directed graph.
+
+    .. deprecated:: 3.3
+
+       random_triad is deprecated and will be removed in version 3.5.
+       Use random sampling directly instead::
+
+           G.subgraph(random.sample(list(G), 3))
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G2 : subgraph
+        A randomly selected triad (order-3 NetworkX DiGraph)
+
+    Raises
+    ------
+    NetworkXError
+        If the input Graph has fewer than 3 nodes.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
+    >>> triad = nx.random_triad(G, seed=1)
+    >>> triad.edges
+    OutEdgeView([(1, 2)])
+
+    """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\nrandom_triad is deprecated and will be removed in NetworkX v3.5.\n"
+            "Use random.sample instead, e.g.::\n\n"
+            "\tG.subgraph(random.sample(list(G), 3))\n"
+        ),
+        category=DeprecationWarning,
+        stacklevel=5,
+    )
+    if len(G) < 3:
+        raise nx.NetworkXError(
+            f"G needs at least 3 nodes to form a triad (it has {len(G)} nodes)"
+        )
+    nodes = seed.sample(list(G.nodes()), 3)
+    G2 = G.subgraph(nodes)
+    return G2
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/vitality.py b/venv/lib/python3.10/site-packages/networkx/algorithms/vitality.py
new file mode 100644
index 0000000000000000000000000000000000000000..29f98fd1bae5fcbba01d2827d21380098cf9fb5c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/vitality.py
@@ -0,0 +1,76 @@
+"""
+Vitality measures.
+"""
+from functools import partial
+
+import networkx as nx
+
+__all__ = ["closeness_vitality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def closeness_vitality(G, node=None, weight=None, wiener_index=None):
+    """Returns the closeness vitality for nodes in the graph.
+
+    The *closeness vitality* of a node, defined in Section 3.6.2 of [1]_,
+    is the change in the sum of distances between all node pairs when
+    excluding that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A strongly-connected graph.
+
+    weight : string
+        The name of the edge attribute used as weight. This is passed
+        directly to the :func:`~networkx.wiener_index` function.
+
+    node : object
+        If specified, only the closeness vitality for this node will be
+        returned. Otherwise, a dictionary mapping each node to its
+        closeness vitality will be returned.
+
+    Other Parameters
+    ----------------
+    wiener_index : number
+        If you have already computed the Wiener index of the graph
+        `G`, you can provide that value here. Otherwise, it will be
+        computed for you.
+
+    Returns
+    -------
+    dictionary or float
+        If `node` is None, this function returns a dictionary
+        with nodes as keys and closeness vitality as the
+        value. Otherwise, it returns only the closeness vitality for the
+        specified `node`.
+
+        The closeness vitality of a node may be negative infinity if
+        removing that node would disconnect the graph.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(3)
+    >>> nx.closeness_vitality(G)
+    {0: 2.0, 1: 2.0, 2: 2.0}
+
+    See Also
+    --------
+    closeness_centrality
+
+    References
+    ----------
+    .. [1] Ulrik Brandes, Thomas Erlebach (eds.).
+           *Network Analysis: Methodological Foundations*.
+           Springer, 2005.
+    """
+    if wiener_index is None:
+        wiener_index = nx.wiener_index(G, weight=weight)
+    if node is not None:
+        after = nx.wiener_index(G.subgraph(set(G) - {node}), weight=weight)
+        return wiener_index - after
+    vitality = partial(closeness_vitality, G, weight=weight, wiener_index=wiener_index)
+    # TODO This can be trivially parallelized.
+    return {v: vitality(node=v) for v in G}
diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/walks.py b/venv/lib/python3.10/site-packages/networkx/algorithms/walks.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe341757750dd36163a9f972ae195489f118d84d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/networkx/algorithms/walks.py
@@ -0,0 +1,80 @@
+"""Function for computing walks in a graph."""
+
+import networkx as nx
+
+__all__ = ["number_of_walks"]
+
+
+@nx._dispatchable
+def number_of_walks(G, walk_length):
+    """Returns the number of walks connecting each pair of nodes in `G`
+
+    A *walk* is a sequence of nodes in which each adjacent pair of nodes
+    in the sequence is adjacent in the graph. A walk can repeat the same
+    edge and go in the opposite direction, just as people can walk on a
+    set of paths, but standing still is not counted as part of the walk.
+
+    This function only counts the walks with `walk_length` edges. Note that
+    the number of nodes in the walk sequence is one more than `walk_length`.
+    The number of walks can grow very quickly on a larger graph
+    and with a larger walk length.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    walk_length : int
+        A nonnegative integer representing the length of a walk.
+
+    Returns
+    -------
+    dict
+        A dictionary of dictionaries in which outer keys are source
+        nodes, inner keys are target nodes, and inner values are the
+        number of walks of length `walk_length` connecting those nodes.
+
+    Raises
+    ------
+    ValueError
+        If `walk_length` is negative.
+
+    Examples
+    --------
+
+    >>> G = nx.Graph([(0, 1), (1, 2)])
+    >>> walks = nx.number_of_walks(G, 2)
+    >>> walks
+    {0: {0: 1, 1: 0, 2: 1}, 1: {0: 0, 1: 2, 2: 0}, 2: {0: 1, 1: 0, 2: 1}}
+    >>> total_walks = sum(sum(tgts.values()) for _, tgts in walks.items())
+    >>> total_walks  # the total number of walks of length 2 in G
+    6
+
+    You can also get the number of walks from a specific source node using the
+    returned dictionary. For example, the number of walks of length 1 from
+    node 0 can be found as follows:
+
+    >>> walks = nx.number_of_walks(G, 1)
+    >>> walks[0]
+    {0: 0, 1: 1, 2: 0}
+    >>> sum(walks[0].values())  # walks from 0 of length 1
+    1
+
+    Similarly, a target node can also be specified:
+
+    >>> walks[0][1]
+    1
+
+    """
+    import numpy as np
+
+    if walk_length < 0:
+        raise ValueError(f"`walk_length` cannot be negative: {walk_length}")
+
+    A = nx.adjacency_matrix(G, weight=None)
+    # TODO: Use matrix_power from scipy.sparse when available
+    # power = sp.sparse.linalg.matrix_power(A, walk_length)
+    power = np.linalg.matrix_power(A.toarray(), walk_length)
+    result = {
+        u: {v: power.item(u_idx, v_idx) for v_idx, v in enumerate(G)}
+        for u_idx, u in enumerate(G)
+    }
+    return result
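+
+
+# A minimal sanity check one might run against this function (an illustrative
+# sketch only, not part of the library API): the walk counts should match the
+# entries of the adjacency matrix raised to the `walk_length`-th power.
+if __name__ == "__main__":
+    import numpy as np
+
+    G = nx.Graph([(0, 1), (1, 2)])
+    k = 3
+    A = nx.adjacency_matrix(G, weight=None).toarray()
+    P = np.linalg.matrix_power(A, k)  # P[i, j] counts k-edge walks i -> j
+    walks = number_of_walks(G, k)
+    assert all(
+        walks[u][v] == P[i, j] for i, u in enumerate(G) for j, v in enumerate(G)
+    )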