diff --git a/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg.pt new file mode 100644 index 0000000000000000000000000000000000000000..55f9b4b786a1bb623866649057fd07acb3148fa4 --- /dev/null +++ b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e620b851e4a3fce5c371d977f6e2fcfedf155ca3d0835671b09dedae66d86ec6 +size 33555612 diff --git a/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg_sq.pt b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..329df38ba54b22544cd3547fc48dca369753ab92 --- /dev/null +++ b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84591e89abf97761e47dd594d299f8e2e07f3a706e0f59c0c30458e76735ff0e +size 33555627 diff --git a/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/fp32.pt b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..524a881af9de42b039d3b87ab5735be6789c1f15 --- /dev/null +++ b/ckpts/universal/global_step80/zero/12.mlp.dense_h_to_4h.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a23ea16cbd946b2cc64500c9ef8fbaf8f8029a1f62be17d57c960208ada3245f +size 33555533 diff --git a/ckpts/universal/global_step80/zero/13.attention.dense.weight/fp32.pt b/ckpts/universal/global_step80/zero/13.attention.dense.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..9ab1a0e3db5af2cb2acc51d8ba13bf36b6588318 --- /dev/null +++ b/ckpts/universal/global_step80/zero/13.attention.dense.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04bb285e0ffed0c60cae5f1fee036a1a794c9c81878b943532431a544619a2c6 +size 16778317 diff --git a/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg.pt new file mode 100644 index 0000000000000000000000000000000000000000..955099d596cf2084886c5711d12936b3e033fabd --- /dev/null +++ b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03bd6cbd31cd8695ce009cc7ef533c3eea94dc50c4d65ffd4f8d7f57b512df27 +size 9372 diff --git a/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg_sq.pt b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..d297190f3f9e67701f8fa823f36e4695d9d87f77 --- /dev/null +++ b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75489ffa76c65d6eec0b56cbd15ed24fbe034947827761c104064d8d0b95c4b5 +size 9387 diff --git a/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/fp32.pt b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..5b065f769ff8759634a83aa583e26ba2f054c546 --- /dev/null +++ b/ckpts/universal/global_step80/zero/19.post_attention_layernorm.weight/fp32.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:6628d0517973d377ccaecd07a4b901be75139f0d8c6152812828241ae45c1713 +size 9293 diff --git a/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg.pt b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg.pt new file mode 100644 index 0000000000000000000000000000000000000000..61e3a59b489b0a182a909906819ca3815c72bce6 --- /dev/null +++ b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:911fc2ecf9d29c7abcbcebcd0f369f480639629dd2e270f273201be66594ca1c +size 50332828 diff --git a/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg_sq.pt b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..7be79bd430476446672d83d0dc9899a8eb0dfda7 --- /dev/null +++ b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e3a8fa3af23d42b2b5bf9d5dcdc1453c63dd0adf7e7bda0f7447539d32ae342 +size 50332843 diff --git a/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/fp32.pt b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..cf823288f131ea25d280deaae6d7fd233b13d921 --- /dev/null +++ b/ckpts/universal/global_step80/zero/21.attention.query_key_value.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aae3fee713713428b716568bcbe51545a824bc44746d6c1a55bbed409d3b8ff6 +size 50332749 diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4dfd52d31cdbd786c523c405e32a23a3f5872a66 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..58a306ac3a54fcd4cd7f6fce28ce53e0489b89ea Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7387badd535090635873cdc79d38e81f5eef0b0d Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8c34819d6847262b6ea4795aa4a707f9458a349 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a99169cdfffd49f927d721f8d072cd84e0b893d Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e1fcc814d3bb3a234e7fd4107daa27529f9464e Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3924605e4b2522dbd8b1d411f465c5f833c0c84 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb8c4780456151224c604b07ee6d006aee694f02 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65b9642d08892f4f4c22e779b25f12ebaa4cd651 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38aec2eddd47e990bbab2c961451ba99a0701378 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3508575daf05a922ef5266051088769cd3ac9f55 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfa67184a02dfe1ddcc74addecd3b04b5eea068a Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32f01b6523279c26339b6ceaac2f55e4da96fe6c Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85547ef0255e85c05800fe348dfdc98a28252ce8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e784a24b9bd9749b61ecff44a778a0bbaf7ddae Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7fb9d66ac5b9da490a9bea9efd5ddfec7c2f3269 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..216634858a844101fcf8f91354b5182ac4dcfe0e Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ea134d9579133b7d6c3123fb7c544a8b7f7ba47 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc1bacdc3172cc94044c34b6804e66fec2b5d8d3 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c45a2c839f07ac57555881c30d6e1c058c688490 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..49fc0c20d1c4669c8c9b5a881e3ba9b34c1d2b4d Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41f3dce5ae27ee4922774eb3b1f5c9e4fb616703 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50e05aed4b31aafbf5846e70ae599f1751cd0510 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fd9f26190bc56225dc27498e60c419e620ef33fd Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..526077a99ffab16dbbe8bb7b4e9c676600f0bd42 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc83530b92546ded3e4a1b356eacbdffc41e2372 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e45818a4b1e514b231f4514664bac57e953562eb Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..733b96f186bfb59f758993d3294eef54f5a8591f Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4874c050e5d9da52e6e666323a14584d33743285 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..898e564be901f8932cf9129e1824b3dadccb4f93 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..232bc2f7ff6cdc9685ea23abeacb170880b0810b Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae52fe8ccb26e23dadbd312f6c3d20695c9321dd Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03efc49fb87e99571506bca22b9468d5a82bf5cd Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..50444c791b5c941c62686e0ee5d714d3bb62f712 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b26d87fa871b5195adfe32a55b0770bb295138a9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d470eaf90fd74cd924d462c2ee72fb35649505d Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..221d6cbe343087ed102d1ffd38bde66cfd140e98 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65c9103a567f445bedb5490cbe06a40d234b723d Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a20d5f4708f664d02b72e89b4cd3e098c8e06212 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0457ba38fa5296819b6195f2c0dfff930b573038 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12e284f0cf04fac717dc1a569e0ddd3588fa19b4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53507d1e3c141c7deb2eab14ee193bfd30d16b66 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..118475671313fe22869eeb49182fc07a18f06a0b Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1fbbae7992c52d553c1c34916165547b555c5917 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ca37e1b144794c7b567b2b0656cad3ee0d9c5ef Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6aa59747f2c529778ee48c6fa4b339f0e471156f Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..132c7764c9f6cdbcfd5c3928f74f951b5de80ce2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e3cde18120fd1c266dbcfd82c86efe381e30599 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..0c301e8e0e0ef4c883fbc50f0e3bd9bc6389dc80 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..72dddac834398c0eeaa456bcb73c3b79a1b13434 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b73ddbc5971e00762171135851fdba9cad45697 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af7e5e21c0fc1722e025571c71f297bd971cb54a Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52a61c7ad318be622e5ef467b83d72dcc5161177 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36c8cee6346bef615c347effd3fea0caf4260cb0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__init__.py b/venv/lib/python3.10/site-packages/networkx/readwrite/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f65509824a10732435057c0fdb3ed0d90636b63c --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/__init__.py @@ -0,0 +1,18 @@ +""" +A package for reading and writing graphs in various formats. 
+ +""" + + +from networkx.readwrite.adjlist import * +from networkx.readwrite.multiline_adjlist import * +from networkx.readwrite.edgelist import * +from networkx.readwrite.pajek import * +from networkx.readwrite.leda import * +from networkx.readwrite.sparse6 import * +from networkx.readwrite.graph6 import * +from networkx.readwrite.gml import * +from networkx.readwrite.graphml import * +from networkx.readwrite.gexf import * +from networkx.readwrite.json_graph import * +from networkx.readwrite.text import * diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c50914580a73783b144f00575fdb0eb47a6cbd7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..26fd11eadad6d92fd435d361546778e2167e7a8c Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gexf.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gexf.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3157dd9e14a45cb810a577772464605df94b028 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gexf.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gml.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gml.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da62ade872acfc3a7e578216251c7c9cf50b5cd8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/gml.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/graph6.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/graph6.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c23fd96121e6cf7334d2010a07722ac3c1d9fc0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/graph6.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/leda.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/leda.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d4c55388ee000df7f5c29d23726058ae4cbe24fc Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/leda.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/pajek.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/pajek.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab8d3e057fb33545c0150be7666b581449b98429 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/pajek.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/text.cpython-310.pyc b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/text.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ef6b755951117104567597aa22f2292941469b0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/networkx/readwrite/__pycache__/text.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/adjlist.py b/venv/lib/python3.10/site-packages/networkx/readwrite/adjlist.py new file mode 100644 index 0000000000000000000000000000000000000000..56a1b4d2bf254f4f82e5cc972c11e9acd3b423c0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/adjlist.py @@ -0,0 +1,310 @@ +""" +************** +Adjacency List +************** +Read and write NetworkX graphs as adjacency lists. + +Adjacency list format is useful for graphs without data associated +with nodes or edges and for nodes that can be meaningfully represented +as strings. + +Format +------ +The adjacency list format consists of lines with node labels. The +first label in a line is the source node. Further labels in the line +are considered target nodes and are added to the graph along with an edge +between the source node and target node. + +The graph with edges a-b, a-c, d-e can be represented as the following +adjacency list (anything following the # in a line is a comment):: + + a b c # source target target + d e +""" + +__all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] + +import networkx as nx +from networkx.utils import open_file + + +def generate_adjlist(G, delimiter=" "): + """Generate a single line of the graph G in adjacency list format. + + Parameters + ---------- + G : NetworkX graph + + delimiter : string, optional + Separator for node labels + + Returns + ------- + lines : string + Lines of data in adjlist format. + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> for line in nx.generate_adjlist(G): + ... print(line) + 0 1 2 3 + 1 2 3 + 2 3 + 3 4 + 4 5 + 5 6 + 6 + + See Also + -------- + write_adjlist, read_adjlist + + Notes + ----- + The default `delimiter=" "` will result in unexpected results if node names contain + whitespace characters. To avoid this problem, specify an alternate delimiter when spaces are + valid in node names. + + NB: This option is not available for data that isn't user-generated. + + """ + directed = G.is_directed() + seen = set() + for s, nbrs in G.adjacency(): + line = str(s) + delimiter + for t, data in nbrs.items(): + if not directed and t in seen: + continue + if G.is_multigraph(): + for d in data.values(): + line += str(t) + delimiter + else: + line += str(t) + delimiter + if not directed: + seen.add(s) + yield line[: -len(delimiter)] + + +@open_file(1, mode="wb") +def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): + """Write graph G in single-line adjacency-list format to path. + + + Parameters + ---------- + G : NetworkX graph + + path : string or file + Filename or file handle for data output. + Filenames ending in .gz or .bz2 will be compressed. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels + + encoding : string, optional + Text encoding. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_adjlist(G, "test.adjlist") + + The path can be a filehandle or a string with the name of the file. If a + filehandle is provided, it has to be opened in 'wb' mode. 
+ + >>> fh = open("test.adjlist", "wb") + >>> nx.write_adjlist(G, fh) + + Notes + ----- + The default `delimiter=" "` will result in unexpected results if node names contain + whitespace characters. To avoid this problem, specify an alternate delimiter when spaces are + valid in node names. + NB: This option is not available for data that isn't user-generated. + + This format does not store graph, node, or edge data. + + See Also + -------- + read_adjlist, generate_adjlist + """ + import sys + import time + + pargs = comments + " ".join(sys.argv) + "\n" + header = ( + pargs + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) + path.write(header.encode(encoding)) + + for line in generate_adjlist(G, delimiter): + line += "\n" + path.write(line.encode(encoding)) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None +): + """Parse lines of a graph adjacency list representation. + + Parameters + ---------- + lines : list or iterator of strings + Input data in adjlist format + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in adjacency list format. + + Examples + -------- + >>> lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] + >>> G = nx.parse_adjlist(lines, nodetype=int) + >>> nodes = [1, 2, 3, 4, 5] + >>> all(node in G for node in nodes) + True + >>> edges = [(1, 2), (1, 5), (2, 3), (2, 4), (3, 5)] + >>> all((u, v) in G.edges() or (v, u) in G.edges() for (u, v) in edges) + True + + See Also + -------- + read_adjlist + + """ + G = nx.empty_graph(0, create_using) + for line in lines: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not len(line): + continue + vlist = line.strip().split(delimiter) + u = vlist.pop(0) + # convert types + if nodetype is not None: + try: + u = nodetype(u) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({u}) to type {nodetype}" + ) from err + G.add_node(u) + if nodetype is not None: + try: + vlist = list(map(nodetype, vlist)) + except BaseException as err: + raise TypeError( + f"Failed to convert nodes ({','.join(vlist)}) to type {nodetype}" + ) from err + G.add_edges_from([(u, v) for v in vlist]) + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): + """Read graph in adjacency list format from path. + + Parameters + ---------- + path : string or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be uncompressed. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in adjacency list format. 
+ + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_adjlist(G, "test.adjlist") + >>> G = nx.read_adjlist("test.adjlist") + + The path can be a filehandle or a string with the name of the file. If a + filehandle is provided, it has to be opened in 'rb' mode. + + >>> fh = open("test.adjlist", "rb") + >>> G = nx.read_adjlist(fh) + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_adjlist(G, "test.adjlist.gz") + >>> G = nx.read_adjlist("test.adjlist.gz") + + The optional nodetype is a function to convert node strings to nodetype. + + For example + + >>> G = nx.read_adjlist("test.adjlist", nodetype=int) + + will attempt to convert all nodes to integer type. + + Since nodes must be hashable, the function nodetype must return hashable + types (e.g. int, float, str, frozenset - or tuples of those, etc.) + + The optional create_using parameter indicates the type of NetworkX graph + created. The default is `nx.Graph`, an undirected graph. + To read the data as a directed graph use + + >>> G = nx.read_adjlist("test.adjlist", create_using=nx.DiGraph) + + Notes + ----- + This format does not store graph or node data. + + See Also + -------- + write_adjlist + """ + lines = (line.decode(encoding) for line in path) + return parse_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + ) diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/edgelist.py b/venv/lib/python3.10/site-packages/networkx/readwrite/edgelist.py new file mode 100644 index 0000000000000000000000000000000000000000..ed445c80b423f34a3a005c79c8a10643175669a0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/edgelist.py @@ -0,0 +1,489 @@ +""" +********** +Edge Lists +********** +Read and write NetworkX graphs as edge lists. + +The multi-line adjacency list format is useful for graphs with nodes +that can be meaningfully represented as strings. With the edgelist +format simple edge data can be stored but node or graph data is not. +There is no way of representing isolated nodes unless the node has a +self-loop edge. + +Format +------ +You can read or write three formats of edge lists with these functions. + +Node pairs with no data:: + + 1 2 + +Python dictionary as data:: + + 1 2 {'weight':7, 'color':'green'} + +Arbitrary data:: + + 1 2 7 green +""" + +__all__ = [ + "generate_edgelist", + "write_edgelist", + "parse_edgelist", + "read_edgelist", + "read_weighted_edgelist", + "write_weighted_edgelist", +] + +import networkx as nx +from networkx.utils import open_file + + +def generate_edgelist(G, delimiter=" ", data=True): + """Generate a single line of the graph G in edge list format. + + Parameters + ---------- + G : NetworkX graph + + delimiter : string, optional + Separator for node labels + + data : bool or list of keys + If False generate no edge data. If True use a dictionary + representation of edge data. If a list of keys use a list of data + values corresponding to the keys. + + Returns + ------- + lines : string + Lines of data in adjlist format. + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> G[1][2]["weight"] = 3 + >>> G[3][4]["capacity"] = 12 + >>> for line in nx.generate_edgelist(G, data=False): + ... print(line) + 0 1 + 0 2 + 0 3 + 1 2 + 1 3 + 2 3 + 3 4 + 4 5 + 5 6 + + >>> for line in nx.generate_edgelist(G): + ... 
print(line) + 0 1 {} + 0 2 {} + 0 3 {} + 1 2 {'weight': 3} + 1 3 {} + 2 3 {} + 3 4 {'capacity': 12} + 4 5 {} + 5 6 {} + + >>> for line in nx.generate_edgelist(G, data=["weight"]): + ... print(line) + 0 1 + 0 2 + 0 3 + 1 2 3 + 1 3 + 2 3 + 3 4 + 4 5 + 5 6 + + See Also + -------- + write_adjlist, read_adjlist + """ + if data is True: + for u, v, d in G.edges(data=True): + e = u, v, dict(d) + yield delimiter.join(map(str, e)) + elif data is False: + for u, v in G.edges(data=False): + e = u, v + yield delimiter.join(map(str, e)) + else: + for u, v, d in G.edges(data=True): + e = [u, v] + try: + e.extend(d[k] for k in data) + except KeyError: + pass # missing data for this edge, should warn? + yield delimiter.join(map(str, e)) + + +@open_file(1, mode="wb") +def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"): + """Write graph as a list of edges. + + Parameters + ---------- + G : graph + A NetworkX graph + path : file or string + File or filename to write. If a file is provided, it must be + opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed. + comments : string, optional + The character used to indicate the start of a comment + delimiter : string, optional + The string used to separate values. The default is whitespace. + data : bool or list, optional + If False write no edge data. + If True write a string representation of the edge data dictionary.. + If a list (or other iterable) is provided, write the keys specified + in the list. + encoding: string, optional + Specify which encoding to use when writing file. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_edgelist(G, "test.edgelist") + >>> G = nx.path_graph(4) + >>> fh = open("test.edgelist", "wb") + >>> nx.write_edgelist(G, fh) + >>> nx.write_edgelist(G, "test.edgelist.gz") + >>> nx.write_edgelist(G, "test.edgelist.gz", data=False) + + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7, color="red") + >>> nx.write_edgelist(G, "test.edgelist", data=False) + >>> nx.write_edgelist(G, "test.edgelist", data=["color"]) + >>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"]) + + See Also + -------- + read_edgelist + write_weighted_edgelist + """ + + for line in generate_edgelist(G, delimiter, data): + line += "\n" + path.write(line.encode(encoding)) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_edgelist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True +): + """Parse lines of an edge list representation of a graph. + + Parameters + ---------- + lines : list or iterator of strings + Input data in edgelist format + comments : string, optional + Marker for comment lines. Default is `'#'`. To specify that no character + should be treated as a comment, use ``comments=None``. + delimiter : string, optional + Separator for node labels. Default is `None`, meaning any whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + nodetype : Python type, optional + Convert nodes to this type. Default is `None`, meaning no conversion is + performed. + data : bool or list of (label,type) tuples + If `False` generate no edge data or if `True` use a dictionary + representation of edge data or a list tuples specifying dictionary + key names and types for edge data. 
+ + Returns + ------- + G: NetworkX Graph + The graph corresponding to lines + + Examples + -------- + Edgelist with no data: + + >>> lines = ["1 2", "2 3", "3 4"] + >>> G = nx.parse_edgelist(lines, nodetype=int) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges()) + [(1, 2), (2, 3), (3, 4)] + + Edgelist with data in Python dictionary representation: + + >>> lines = ["1 2 {'weight': 3}", "2 3 {'weight': 27}", "3 4 {'weight': 3.0}"] + >>> G = nx.parse_edgelist(lines, nodetype=int) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})] + + Edgelist with data in a list: + + >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"] + >>> G = nx.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})] + + See Also + -------- + read_weighted_edgelist + """ + from ast import literal_eval + + G = nx.empty_graph(0, create_using) + for line in lines: + if comments is not None: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not line: + continue + # split line, should have 2 or more + s = line.strip().split(delimiter) + if len(s) < 2: + continue + u = s.pop(0) + v = s.pop(0) + d = s + if nodetype is not None: + try: + u = nodetype(u) + v = nodetype(v) + except Exception as err: + raise TypeError( + f"Failed to convert nodes {u},{v} to type {nodetype}." + ) from err + + if len(d) == 0 or data is False: + # no data or data type specified + edgedata = {} + elif data is True: + # no edge types specified + try: # try to evaluate as dictionary + if delimiter == ",": + edgedata_str = ",".join(d) + else: + edgedata_str = " ".join(d) + edgedata = dict(literal_eval(edgedata_str.strip())) + except Exception as err: + raise TypeError( + f"Failed to convert edge data ({d}) to dictionary." + ) from err + else: + # convert edge data to dictionary with specified keys and type + if len(d) != len(data): + raise IndexError( + f"Edge data {d} and data_keys {data} are not the same length" + ) + edgedata = {} + for (edge_key, edge_type), edge_value in zip(data, d): + try: + edge_value = edge_type(edge_value) + except Exception as err: + raise TypeError( + f"Failed to convert {edge_key} data {edge_value} " + f"to type {edge_type}." + ) from err + edgedata.update({edge_key: edge_value}) + G.add_edge(u, v, **edgedata) + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): + """Read a graph from a list of edges. + + Parameters + ---------- + path : file or string + File or filename to read. If a file is provided, it must be + opened in 'rb' mode. + Filenames ending in .gz or .bz2 will be uncompressed. + comments : string, optional + The character used to indicate the start of a comment. To specify that + no character should be treated as a comment, use ``comments=None``. + delimiter : string, optional + The string used to separate values. The default is whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
+ nodetype : int, float, str, Python type, optional + Convert node data from strings to specified type + data : bool or list of (label,type) tuples + Tuples specifying dictionary key names and types for edge data + edgetype : int, float, str, Python type, optional OBSOLETE + Convert edge data from strings to specified type and use as 'weight' + encoding: string, optional + Specify which encoding to use when reading file. + + Returns + ------- + G : graph + A networkx Graph or other type specified with create_using + + Examples + -------- + >>> nx.write_edgelist(nx.path_graph(4), "test.edgelist") + >>> G = nx.read_edgelist("test.edgelist") + + >>> fh = open("test.edgelist", "rb") + >>> G = nx.read_edgelist(fh) + >>> fh.close() + + >>> G = nx.read_edgelist("test.edgelist", nodetype=int) + >>> G = nx.read_edgelist("test.edgelist", create_using=nx.DiGraph) + + Edgelist with data in a list: + + >>> textline = "1 2 3" + >>> fh = open("test.edgelist", "w") + >>> d = fh.write(textline) + >>> fh.close() + >>> G = nx.read_edgelist("test.edgelist", nodetype=int, data=(("weight", float),)) + >>> list(G) + [1, 2] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3.0})] + + See parse_edgelist() for more examples of formatting. + + See Also + -------- + parse_edgelist + write_edgelist + + Notes + ----- + Since nodes must be hashable, the function nodetype must return hashable + types (e.g. int, float, str, frozenset - or tuples of those, etc.) + """ + lines = (line if isinstance(line, str) else line.decode(encoding) for line in path) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) + + +def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf-8"): + """Write graph G as a list of edges with numeric weights. + + Parameters + ---------- + G : graph + A NetworkX graph + path : file or string + File or filename to write. If a file is provided, it must be + opened in 'wb' mode. + Filenames ending in .gz or .bz2 will be compressed. + comments : string, optional + The character used to indicate the start of a comment + delimiter : string, optional + The string used to separate values. The default is whitespace. + encoding: string, optional + Specify which encoding to use when writing file. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7) + >>> nx.write_weighted_edgelist(G, "test.weighted.edgelist") + + See Also + -------- + read_edgelist + write_edgelist + read_weighted_edgelist + """ + write_edgelist( + G, + path, + comments=comments, + delimiter=delimiter, + data=("weight",), + encoding=encoding, + ) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def read_weighted_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): + """Read a graph as list of edges with numeric weights. + + Parameters + ---------- + path : file or string + File or filename to read. If a file is provided, it must be + opened in 'rb' mode. + Filenames ending in .gz or .bz2 will be uncompressed. + comments : string, optional + The character used to indicate the start of a comment. + delimiter : string, optional + The string used to separate values. The default is whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
+ nodetype : int, float, str, Python type, optional + Convert node data from strings to specified type + encoding: string, optional + Specify which encoding to use when reading file. + + Returns + ------- + G : graph + A networkx Graph or other type specified with create_using + + Notes + ----- + Since nodes must be hashable, the function nodetype must return hashable + types (e.g. int, float, str, frozenset - or tuples of those, etc.) + + Example edgelist file format. + + With numeric edge data:: + + # read with + # >>> G=nx.read_weighted_edgelist(fh) + # source target data + a b 1 + a c 3.14159 + d e 42 + + See Also + -------- + write_weighted_edgelist + """ + return read_edgelist( + path, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=(("weight", float),), + encoding=encoding, + ) diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/gexf.py b/venv/lib/python3.10/site-packages/networkx/readwrite/gexf.py new file mode 100644 index 0000000000000000000000000000000000000000..16b864377bddc8e249dfbbfd3cce2442f09fe7f2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/gexf.py @@ -0,0 +1,1065 @@ +"""Read and write graphs in GEXF format. + +.. warning:: + This parser uses the standard xml library present in Python, which is + insecure - see :external+python:mod:`xml` for additional information. + Only parse GEFX files you trust. + +GEXF (Graph Exchange XML Format) is a language for describing complex +network structures, their associated data and dynamics. + +This implementation does not support mixed graphs (directed and +undirected edges together). + +Format +------ +GEXF is an XML format. See http://gexf.net/schema.html for the +specification and http://gexf.net/basic.html for examples. +""" +import itertools +import time +from xml.etree.ElementTree import ( + Element, + ElementTree, + SubElement, + register_namespace, + tostring, +) + +import networkx as nx +from networkx.utils import open_file + +__all__ = ["write_gexf", "read_gexf", "relabel_gexf_graph", "generate_gexf"] + + +@open_file(1, mode="wb") +def write_gexf(G, path, encoding="utf-8", prettyprint=True, version="1.2draft"): + """Write G in GEXF format to path. + + "GEXF (Graph Exchange XML Format) is a language for describing + complex networks structures, their associated data and dynamics" [1]_. + + Node attributes are checked according to the version of the GEXF + schemas used for parameters which are not user defined, + e.g. visualization 'viz' [2]_. See example for usage. + + Parameters + ---------- + G : graph + A NetworkX graph + path : file or string + File or file name to write. + File names ending in .gz or .bz2 will be compressed. + encoding : string (optional, default: 'utf-8') + Encoding for text data. + prettyprint : bool (optional, default: True) + If True use line breaks and indenting in output XML. + version: string (optional, default: '1.2draft') + The version of GEXF to be used for nodes attributes checking + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_gexf(G, "test.gexf") + + # visualization data + >>> G.nodes[0]["viz"] = {"size": 54} + >>> G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1} + >>> G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256} + + + Notes + ----- + This implementation does not support mixed graphs (directed and undirected + edges together). + + The node id attribute is set to be the string of the node label. + If you want to specify an id use set it as node data, e.g. 
+ node['a']['id']=1 to set the id of node 'a' to 1. + + References + ---------- + .. [1] GEXF File Format, http://gexf.net/ + .. [2] GEXF schema, http://gexf.net/schema.html + """ + writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version) + writer.add_graph(G) + writer.write(path) + + +def generate_gexf(G, encoding="utf-8", prettyprint=True, version="1.2draft"): + """Generate lines of GEXF format representation of G. + + "GEXF (Graph Exchange XML Format) is a language for describing + complex networks structures, their associated data and dynamics" [1]_. + + Parameters + ---------- + G : graph + A NetworkX graph + encoding : string (optional, default: 'utf-8') + Encoding for text data. + prettyprint : bool (optional, default: True) + If True use line breaks and indenting in output XML. + version : string (default: 1.2draft) + Version of GEFX File Format (see http://gexf.net/schema.html) + Supported values: "1.1draft", "1.2draft" + + + Examples + -------- + >>> G = nx.path_graph(4) + >>> linefeed = chr(10) # linefeed=\n + >>> s = linefeed.join(nx.generate_gexf(G)) + >>> for line in nx.generate_gexf(G): # doctest: +SKIP + ... print(line) + + Notes + ----- + This implementation does not support mixed graphs (directed and undirected + edges together). + + The node id attribute is set to be the string of the node label. + If you want to specify an id use set it as node data, e.g. + node['a']['id']=1 to set the id of node 'a' to 1. + + References + ---------- + .. [1] GEXF File Format, https://gephi.org/gexf/format/ + """ + writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version) + writer.add_graph(G) + yield from str(writer).splitlines() + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_gexf(path, node_type=None, relabel=False, version="1.2draft"): + """Read graph in GEXF format from path. + + "GEXF (Graph Exchange XML Format) is a language for describing + complex networks structures, their associated data and dynamics" [1]_. + + Parameters + ---------- + path : file or string + File or file name to read. + File names ending in .gz or .bz2 will be decompressed. + node_type: Python type (default: None) + Convert node ids to this type if not None. + relabel : bool (default: False) + If True relabel the nodes to use the GEXF node "label" attribute + instead of the node "id" attribute as the NetworkX node label. + version : string (default: 1.2draft) + Version of GEFX File Format (see http://gexf.net/schema.html) + Supported values: "1.1draft", "1.2draft" + + Returns + ------- + graph: NetworkX graph + If no parallel edges are found a Graph or DiGraph is returned. + Otherwise a MultiGraph or MultiDiGraph is returned. + + Notes + ----- + This implementation does not support mixed graphs (directed and undirected + edges together). + + References + ---------- + .. 
[1] GEXF File Format, http://gexf.net/ + """ + reader = GEXFReader(node_type=node_type, version=version) + if relabel: + G = relabel_gexf_graph(reader(path)) + else: + G = reader(path) + return G + + +class GEXF: + versions = { + "1.1draft": { + "NS_GEXF": "http://www.gexf.net/1.1draft", + "NS_VIZ": "http://www.gexf.net/1.1draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + [ + "http://www.gexf.net/1.1draft", + "http://www.gexf.net/1.1draft/gexf.xsd", + ] + ), + "VERSION": "1.1", + }, + "1.2draft": { + "NS_GEXF": "http://www.gexf.net/1.2draft", + "NS_VIZ": "http://www.gexf.net/1.2draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + [ + "http://www.gexf.net/1.2draft", + "http://www.gexf.net/1.2draft/gexf.xsd", + ] + ), + "VERSION": "1.2", + }, + } + + def construct_types(self): + types = [ + (int, "integer"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + (list, "string"), + (dict, "string"), + (int, "long"), + (str, "liststring"), + (str, "anyURI"), + (str, "string"), + ] + + # These additions to types allow writing numpy types + try: + import numpy as np + except ImportError: + pass + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types + + self.xml_type = dict(types) + self.python_type = dict(reversed(a) for a in types) + + # http://www.w3.org/TR/xmlschema-2/#boolean + convert_bool = { + "true": True, + "false": False, + "True": True, + "False": False, + "0": False, + 0: False, + "1": True, + 1: True, + } + + def set_version(self, version): + d = self.versions.get(version) + if d is None: + raise nx.NetworkXError(f"Unknown GEXF version {version}.") + self.NS_GEXF = d["NS_GEXF"] + self.NS_VIZ = d["NS_VIZ"] + self.NS_XSI = d["NS_XSI"] + self.SCHEMALOCATION = d["SCHEMALOCATION"] + self.VERSION = d["VERSION"] + self.version = version + + +class GEXFWriter(GEXF): + # class for writing GEXF format files + # use write_gexf() function + def __init__( + self, graph=None, encoding="utf-8", prettyprint=True, version="1.2draft" + ): + self.construct_types() + self.prettyprint = prettyprint + self.encoding = encoding + self.set_version(version) + self.xml = Element( + "gexf", + { + "xmlns": self.NS_GEXF, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + "version": self.VERSION, + }, + ) + + # Make meta element a non-graph element + # Also add lastmodifieddate as attribute, not tag + meta_element = Element("meta") + subelement_text = f"NetworkX {nx.__version__}" + SubElement(meta_element, "creator").text = subelement_text + meta_element.set("lastmodifieddate", time.strftime("%Y-%m-%d")) + self.xml.append(meta_element) + + register_namespace("viz", self.NS_VIZ) + + # counters for edge and attribute identifiers + self.edge_id = itertools.count() + self.attr_id = itertools.count() + self.all_edge_ids = set() + # default attributes are stored in dictionaries + self.attr = {} + self.attr["node"] = {} + self.attr["edge"] = {} + self.attr["node"]["dynamic"] = {} + self.attr["node"]["static"] = {} + self.attr["edge"]["dynamic"] = {} + self.attr["edge"]["static"] = {} + + if graph is not None: + 
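+            # e.g. GEXFWriter(graph=G) serializes G at construction time, so
+            # str(writer) immediately yields the complete <gexf> document.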
self.add_graph(graph) + + def __str__(self): + if self.prettyprint: + self.indent(self.xml) + s = tostring(self.xml).decode(self.encoding) + return s + + def add_graph(self, G): + # first pass through G collecting edge ids + for u, v, dd in G.edges(data=True): + eid = dd.get("id") + if eid is not None: + self.all_edge_ids.add(str(eid)) + # set graph attributes + if G.graph.get("mode") == "dynamic": + mode = "dynamic" + else: + mode = "static" + # Add a graph element to the XML + if G.is_directed(): + default = "directed" + else: + default = "undirected" + name = G.graph.get("name", "") + graph_element = Element("graph", defaultedgetype=default, mode=mode, name=name) + self.graph_element = graph_element + self.add_nodes(G, graph_element) + self.add_edges(G, graph_element) + self.xml.append(graph_element) + + def add_nodes(self, G, graph_element): + nodes_element = Element("nodes") + for node, data in G.nodes(data=True): + node_data = data.copy() + node_id = str(node_data.pop("id", node)) + kw = {"id": node_id} + label = str(node_data.pop("label", node)) + kw["label"] = label + try: + pid = node_data.pop("pid") + kw["pid"] = str(pid) + except KeyError: + pass + try: + start = node_data.pop("start") + kw["start"] = str(start) + self.alter_graph_mode_timeformat(start) + except KeyError: + pass + try: + end = node_data.pop("end") + kw["end"] = str(end) + self.alter_graph_mode_timeformat(end) + except KeyError: + pass + # add node element with attributes + node_element = Element("node", **kw) + # add node element and attr subelements + default = G.graph.get("node_default", {}) + node_data = self.add_parents(node_element, node_data) + if self.VERSION == "1.1": + node_data = self.add_slices(node_element, node_data) + else: + node_data = self.add_spells(node_element, node_data) + node_data = self.add_viz(node_element, node_data) + node_data = self.add_attributes("node", node_element, node_data, default) + nodes_element.append(node_element) + graph_element.append(nodes_element) + + def add_edges(self, G, graph_element): + def edge_key_data(G): + # helper function to unify multigraph and graph edge iterator + if G.is_multigraph(): + for u, v, key, data in G.edges(data=True, keys=True): + edge_data = data.copy() + edge_data.update(key=key) + edge_id = edge_data.pop("id", None) + if edge_id is None: + edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) + yield u, v, edge_id, edge_data + else: + for u, v, data in G.edges(data=True): + edge_data = data.copy() + edge_id = edge_data.pop("id", None) + if edge_id is None: + edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) + yield u, v, edge_id, edge_data + + edges_element = Element("edges") + for u, v, key, edge_data in edge_key_data(G): + kw = {"id": str(key)} + try: + edge_label = edge_data.pop("label") + kw["label"] = str(edge_label) + except KeyError: + pass + try: + edge_weight = edge_data.pop("weight") + kw["weight"] = str(edge_weight) + except KeyError: + pass + try: + edge_type = edge_data.pop("type") + kw["type"] = str(edge_type) + except KeyError: + pass + try: + start = edge_data.pop("start") + kw["start"] = str(start) + self.alter_graph_mode_timeformat(start) + except KeyError: + pass + try: + end = edge_data.pop("end") + kw["end"] = str(end) + self.alter_graph_mode_timeformat(end) + except KeyError: + pass + source_id = str(G.nodes[u].get("id", u)) + target_id = 
str(G.nodes[v].get("id", v)) + edge_element = Element("edge", source=source_id, target=target_id, **kw) + default = G.graph.get("edge_default", {}) + if self.VERSION == "1.1": + edge_data = self.add_slices(edge_element, edge_data) + else: + edge_data = self.add_spells(edge_element, edge_data) + edge_data = self.add_viz(edge_element, edge_data) + edge_data = self.add_attributes("edge", edge_element, edge_data, default) + edges_element.append(edge_element) + graph_element.append(edges_element) + + def add_attributes(self, node_or_edge, xml_obj, data, default): + # Add attrvalues to node or edge + attvalues = Element("attvalues") + if len(data) == 0: + return data + mode = "static" + for k, v in data.items(): + # rename generic multigraph key to avoid any name conflict + if k == "key": + k = "networkx_key" + val_type = type(v) + if val_type not in self.xml_type: + raise TypeError(f"attribute value type is not allowed: {val_type}") + if isinstance(v, list): + # dynamic data + for val, start, end in v: + val_type = type(val) + if start is not None or end is not None: + mode = "dynamic" + self.alter_graph_mode_timeformat(start) + self.alter_graph_mode_timeformat(end) + break + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) + for val, start, end in v: + e = Element("attvalue") + e.attrib["for"] = attr_id + e.attrib["value"] = str(val) + # Handle nan, inf, -inf differently + if val_type == float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" + if start is not None: + e.attrib["start"] = str(start) + if end is not None: + e.attrib["end"] = str(end) + attvalues.append(e) + else: + # static data + mode = "static" + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) + e = Element("attvalue") + e.attrib["for"] = attr_id + if isinstance(v, bool): + e.attrib["value"] = str(v).lower() + else: + e.attrib["value"] = str(v) + # Handle float nan, inf, -inf differently + if val_type == float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" + attvalues.append(e) + xml_obj.append(attvalues) + return data + + def get_attr_id(self, title, attr_type, edge_or_node, default, mode): + # find the id of the attribute or generate a new id + try: + return self.attr[edge_or_node][mode][title] + except KeyError: + # generate new id + new_id = str(next(self.attr_id)) + self.attr[edge_or_node][mode][title] = new_id + attr_kwargs = {"id": new_id, "title": title, "type": attr_type} + attribute = Element("attribute", **attr_kwargs) + # add subelement for data default value if present + default_title = default.get(title) + if default_title is not None: + default_element = Element("default") + default_element.text = str(default_title) + attribute.append(default_element) + # new insert it into the XML + attributes_element = None + for a in self.graph_element.findall("attributes"): + # find existing attributes element by class and mode + a_class = a.get("class") + a_mode = a.get("mode", "static") + if a_class == edge_or_node and a_mode == mode: + attributes_element = a + if attributes_element is None: + # create new attributes element + attr_kwargs = {"mode": mode, "class": edge_or_node} + attributes_element = Element("attributes", **attr_kwargs) + 
self.graph_element.insert(0, attributes_element) + attributes_element.append(attribute) + return new_id + + def add_viz(self, element, node_data): + viz = node_data.pop("viz", False) + if viz: + color = viz.get("color") + if color is not None: + if self.VERSION == "1.1": + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + ) + else: + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + a=str(color.get("a", 1.0)), + ) + element.append(e) + + size = viz.get("size") + if size is not None: + e = Element(f"{{{self.NS_VIZ}}}size", value=str(size)) + element.append(e) + + thickness = viz.get("thickness") + if thickness is not None: + e = Element(f"{{{self.NS_VIZ}}}thickness", value=str(thickness)) + element.append(e) + + shape = viz.get("shape") + if shape is not None: + if shape.startswith("http"): + e = Element( + f"{{{self.NS_VIZ}}}shape", value="image", uri=str(shape) + ) + else: + e = Element(f"{{{self.NS_VIZ}}}shape", value=str(shape)) + element.append(e) + + position = viz.get("position") + if position is not None: + e = Element( + f"{{{self.NS_VIZ}}}position", + x=str(position.get("x")), + y=str(position.get("y")), + z=str(position.get("z")), + ) + element.append(e) + return node_data + + def add_parents(self, node_element, node_data): + parents = node_data.pop("parents", False) + if parents: + parents_element = Element("parents") + for p in parents: + e = Element("parent") + e.attrib["for"] = str(p) + parents_element.append(e) + node_element.append(parents_element) + return node_data + + def add_slices(self, node_or_edge_element, node_or_edge_data): + slices = node_or_edge_data.pop("slices", False) + if slices: + slices_element = Element("slices") + for start, end in slices: + e = Element("slice", start=str(start), end=str(end)) + slices_element.append(e) + node_or_edge_element.append(slices_element) + return node_or_edge_data + + def add_spells(self, node_or_edge_element, node_or_edge_data): + spells = node_or_edge_data.pop("spells", False) + if spells: + spells_element = Element("spells") + for start, end in spells: + e = Element("spell") + if start is not None: + e.attrib["start"] = str(start) + self.alter_graph_mode_timeformat(start) + if end is not None: + e.attrib["end"] = str(end) + self.alter_graph_mode_timeformat(end) + spells_element.append(e) + node_or_edge_element.append(spells_element) + return node_or_edge_data + + def alter_graph_mode_timeformat(self, start_or_end): + # If 'start' or 'end' appears, alter Graph mode to dynamic and + # set timeformat + if self.graph_element.get("mode") == "static": + if start_or_end is not None: + if isinstance(start_or_end, str): + timeformat = "date" + elif isinstance(start_or_end, float): + timeformat = "double" + elif isinstance(start_or_end, int): + timeformat = "long" + else: + raise nx.NetworkXError( + "timeformat should be of the type int, float or str" + ) + self.graph_element.set("timeformat", timeformat) + self.graph_element.set("mode", "dynamic") + + def write(self, fh): + # Serialize graph G in GEXF to the open fh + if self.prettyprint: + self.indent(self.xml) + document = ElementTree(self.xml) + document.write(fh, encoding=self.encoding, xml_declaration=True) + + def indent(self, elem, level=0): + # in-place prettyprint formatter + i = "\n" + " " * level + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i 
+ for elem in elem: + self.indent(elem, level + 1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +class GEXFReader(GEXF): + # Class to read GEXF format files + # use read_gexf() function + def __init__(self, node_type=None, version="1.2draft"): + self.construct_types() + self.node_type = node_type + # assume simple graph and test for multigraph on read + self.simple_graph = True + self.set_version(version) + + def __call__(self, stream): + self.xml = ElementTree(file=stream) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") + if g is not None: + return self.make_graph(g) + # try all the versions + for version in self.versions: + self.set_version(version) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") + if g is not None: + return self.make_graph(g) + raise nx.NetworkXError("No element in GEXF file.") + + def make_graph(self, graph_xml): + # start with empty DiGraph or MultiDiGraph + edgedefault = graph_xml.get("defaultedgetype", None) + if edgedefault == "directed": + G = nx.MultiDiGraph() + else: + G = nx.MultiGraph() + + # graph attributes + graph_name = graph_xml.get("name", "") + if graph_name != "": + G.graph["name"] = graph_name + graph_start = graph_xml.get("start") + if graph_start is not None: + G.graph["start"] = graph_start + graph_end = graph_xml.get("end") + if graph_end is not None: + G.graph["end"] = graph_end + graph_mode = graph_xml.get("mode", "") + if graph_mode == "dynamic": + G.graph["mode"] = "dynamic" + else: + G.graph["mode"] = "static" + + # timeformat + self.timeformat = graph_xml.get("timeformat") + if self.timeformat == "date": + self.timeformat = "string" + + # node and edge attributes + attributes_elements = graph_xml.findall(f"{{{self.NS_GEXF}}}attributes") + # dictionaries to hold attributes and attribute defaults + node_attr = {} + node_default = {} + edge_attr = {} + edge_default = {} + for a in attributes_elements: + attr_class = a.get("class") + if attr_class == "node": + na, nd = self.find_gexf_attributes(a) + node_attr.update(na) + node_default.update(nd) + G.graph["node_default"] = node_default + elif attr_class == "edge": + ea, ed = self.find_gexf_attributes(a) + edge_attr.update(ea) + edge_default.update(ed) + G.graph["edge_default"] = edge_default + else: + raise # unknown attribute class + + # Hack to handle Gephi0.7beta bug + # add weight attribute + ea = {"weight": {"type": "double", "mode": "static", "title": "weight"}} + ed = {} + edge_attr.update(ea) + edge_default.update(ed) + G.graph["edge_default"] = edge_default + + # add nodes + nodes_element = graph_xml.find(f"{{{self.NS_GEXF}}}nodes") + if nodes_element is not None: + for node_xml in nodes_element.findall(f"{{{self.NS_GEXF}}}node"): + self.add_node(G, node_xml, node_attr) + + # add edges + edges_element = graph_xml.find(f"{{{self.NS_GEXF}}}edges") + if edges_element is not None: + for edge_xml in edges_element.findall(f"{{{self.NS_GEXF}}}edge"): + self.add_edge(G, edge_xml, edge_attr) + + # switch to Graph or DiGraph if no parallel edges were found. 
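+        # add_edge() flips self.simple_graph to False as soon as it sees a
+        # repeated source/target pair, so a GEXF file containing parallel edges
+        # keeps its MultiGraph/MultiDiGraph type instead of being downgraded here.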
+ if self.simple_graph: + if G.is_directed(): + G = nx.DiGraph(G) + else: + G = nx.Graph(G) + return G + + def add_node(self, G, node_xml, node_attr, node_pid=None): + # add a single node with attributes to the graph + + # get attributes and subattributues for node + data = self.decode_attr_elements(node_attr, node_xml) + data = self.add_parents(data, node_xml) # add any parents + if self.VERSION == "1.1": + data = self.add_slices(data, node_xml) # add slices + else: + data = self.add_spells(data, node_xml) # add spells + data = self.add_viz(data, node_xml) # add viz + data = self.add_start_end(data, node_xml) # add start/end + + # find the node id and cast it to the appropriate type + node_id = node_xml.get("id") + if self.node_type is not None: + node_id = self.node_type(node_id) + + # every node should have a label + node_label = node_xml.get("label") + data["label"] = node_label + + # parent node id + node_pid = node_xml.get("pid", node_pid) + if node_pid is not None: + data["pid"] = node_pid + + # check for subnodes, recursive + subnodes = node_xml.find(f"{{{self.NS_GEXF}}}nodes") + if subnodes is not None: + for node_xml in subnodes.findall(f"{{{self.NS_GEXF}}}node"): + self.add_node(G, node_xml, node_attr, node_pid=node_id) + + G.add_node(node_id, **data) + + def add_start_end(self, data, xml): + # start and end times + ttype = self.timeformat + node_start = xml.get("start") + if node_start is not None: + data["start"] = self.python_type[ttype](node_start) + node_end = xml.get("end") + if node_end is not None: + data["end"] = self.python_type[ttype](node_end) + return data + + def add_viz(self, data, node_xml): + # add viz element for node + viz = {} + color = node_xml.find(f"{{{self.NS_VIZ}}}color") + if color is not None: + if self.VERSION == "1.1": + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + } + else: + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + "a": float(color.get("a", 1)), + } + + size = node_xml.find(f"{{{self.NS_VIZ}}}size") + if size is not None: + viz["size"] = float(size.get("value")) + + thickness = node_xml.find(f"{{{self.NS_VIZ}}}thickness") + if thickness is not None: + viz["thickness"] = float(thickness.get("value")) + + shape = node_xml.find(f"{{{self.NS_VIZ}}}shape") + if shape is not None: + viz["shape"] = shape.get("shape") + if viz["shape"] == "image": + viz["shape"] = shape.get("uri") + + position = node_xml.find(f"{{{self.NS_VIZ}}}position") + if position is not None: + viz["position"] = { + "x": float(position.get("x", 0)), + "y": float(position.get("y", 0)), + "z": float(position.get("z", 0)), + } + + if len(viz) > 0: + data["viz"] = viz + return data + + def add_parents(self, data, node_xml): + parents_element = node_xml.find(f"{{{self.NS_GEXF}}}parents") + if parents_element is not None: + data["parents"] = [] + for p in parents_element.findall(f"{{{self.NS_GEXF}}}parent"): + parent = p.get("for") + data["parents"].append(parent) + return data + + def add_slices(self, data, node_or_edge_xml): + slices_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}slices") + if slices_element is not None: + data["slices"] = [] + for s in slices_element.findall(f"{{{self.NS_GEXF}}}slice"): + start = s.get("start") + end = s.get("end") + data["slices"].append((start, end)) + return data + + def add_spells(self, data, node_or_edge_xml): + spells_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}spells") + if spells_element is not None: + 
data["spells"] = [] + ttype = self.timeformat + for s in spells_element.findall(f"{{{self.NS_GEXF}}}spell"): + start = self.python_type[ttype](s.get("start")) + end = self.python_type[ttype](s.get("end")) + data["spells"].append((start, end)) + return data + + def add_edge(self, G, edge_element, edge_attr): + # add an edge to the graph + + # raise error if we find mixed directed and undirected edges + edge_direction = edge_element.get("type") + if G.is_directed() and edge_direction == "undirected": + raise nx.NetworkXError("Undirected edge found in directed graph.") + if (not G.is_directed()) and edge_direction == "directed": + raise nx.NetworkXError("Directed edge found in undirected graph.") + + # Get source and target and recast type if required + source = edge_element.get("source") + target = edge_element.get("target") + if self.node_type is not None: + source = self.node_type(source) + target = self.node_type(target) + + data = self.decode_attr_elements(edge_attr, edge_element) + data = self.add_start_end(data, edge_element) + + if self.VERSION == "1.1": + data = self.add_slices(data, edge_element) # add slices + else: + data = self.add_spells(data, edge_element) # add spells + + # GEXF stores edge ids as an attribute + # NetworkX uses them as keys in multigraphs + # if networkx_key is not specified as an attribute + edge_id = edge_element.get("id") + if edge_id is not None: + data["id"] = edge_id + + # check if there is a 'multigraph_key' and use that as edge_id + multigraph_key = data.pop("networkx_key", None) + if multigraph_key is not None: + edge_id = multigraph_key + + weight = edge_element.get("weight") + if weight is not None: + data["weight"] = float(weight) + + edge_label = edge_element.get("label") + if edge_label is not None: + data["label"] = edge_label + + if G.has_edge(source, target): + # seen this edge before - this is a multigraph + self.simple_graph = False + G.add_edge(source, target, key=edge_id, **data) + if edge_direction == "mutual": + G.add_edge(target, source, key=edge_id, **data) + + def decode_attr_elements(self, gexf_keys, obj_xml): + # Use the key information to decode the attr XML + attr = {} + # look for outer '' element + attr_element = obj_xml.find(f"{{{self.NS_GEXF}}}attvalues") + if attr_element is not None: + # loop over elements + for a in attr_element.findall(f"{{{self.NS_GEXF}}}attvalue"): + key = a.get("for") # for is required + try: # should be in our gexf_keys dictionary + title = gexf_keys[key]["title"] + except KeyError as err: + raise nx.NetworkXError(f"No attribute defined for={key}.") from err + atype = gexf_keys[key]["type"] + value = a.get("value") + if atype == "boolean": + value = self.convert_bool[value] + else: + value = self.python_type[atype](value) + if gexf_keys[key]["mode"] == "dynamic": + # for dynamic graphs use list of three-tuples + # [(value1,start1,end1), (value2,start2,end2), etc] + ttype = self.timeformat + start = self.python_type[ttype](a.get("start")) + end = self.python_type[ttype](a.get("end")) + if title in attr: + attr[title].append((value, start, end)) + else: + attr[title] = [(value, start, end)] + else: + # for static graphs just assign the value + attr[title] = value + return attr + + def find_gexf_attributes(self, attributes_element): + # Extract all the attributes and defaults + attrs = {} + defaults = {} + mode = attributes_element.get("mode") + for k in attributes_element.findall(f"{{{self.NS_GEXF}}}attribute"): + attr_id = k.get("id") + title = k.get("title") + atype = k.get("type") + attrs[attr_id] = 
{"title": title, "type": atype, "mode": mode} + # check for the 'default' subelement of key element and add + default = k.find(f"{{{self.NS_GEXF}}}default") + if default is not None: + if atype == "boolean": + value = self.convert_bool[default.text] + else: + value = self.python_type[atype](default.text) + defaults[title] = value + return attrs, defaults + + +def relabel_gexf_graph(G): + """Relabel graph using "label" node keyword for node label. + + Parameters + ---------- + G : graph + A NetworkX graph read from GEXF data + + Returns + ------- + H : graph + A NetworkX graph with relabeled nodes + + Raises + ------ + NetworkXError + If node labels are missing or not unique while relabel=True. + + Notes + ----- + This function relabels the nodes in a NetworkX graph with the + "label" attribute. It also handles relabeling the specific GEXF + node attributes "parents", and "pid". + """ + # build mapping of node labels, do some error checking + try: + mapping = [(u, G.nodes[u]["label"]) for u in G] + except KeyError as err: + raise nx.NetworkXError( + "Failed to relabel nodes: missing node labels found. Use relabel=False." + ) from err + x, y = zip(*mapping) + if len(set(y)) != len(G): + raise nx.NetworkXError( + "Failed to relabel nodes: " + "duplicate node labels found. " + "Use relabel=False." + ) + mapping = dict(mapping) + H = nx.relabel_nodes(G, mapping) + # relabel attributes + for n in G: + m = mapping[n] + H.nodes[m]["id"] = n + H.nodes[m].pop("label") + if "pid" in H.nodes[m]: + H.nodes[m]["pid"] = mapping[G.nodes[n]["pid"]] + if "parents" in H.nodes[m]: + H.nodes[m]["parents"] = [mapping[p] for p in G.nodes[n]["parents"]] + return H diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/gml.py b/venv/lib/python3.10/site-packages/networkx/readwrite/gml.py new file mode 100644 index 0000000000000000000000000000000000000000..bec99154991f947e7444ef159b29050b75b5f5c2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/gml.py @@ -0,0 +1,878 @@ +""" +Read graphs in GML format. + +"GML, the Graph Modelling Language, is our proposal for a portable +file format for graphs. GML's key features are portability, simple +syntax, extensibility and flexibility. A GML file consists of a +hierarchical key-value lists. Graphs can be annotated with arbitrary +data structures. The idea for a common file format was born at the +GD'95; this proposal is the outcome of many discussions. GML is the +standard file format in the Graphlet graph editor system. It has been +overtaken and adapted by several other systems for drawing graphs." + +GML files are stored using a 7-bit ASCII encoding with any extended +ASCII characters (iso8859-1) appearing as HTML character entities. +You will need to give some thought into how the exported data should +interact with different languages and even different Python versions. +Re-importing from gml is also a concern. + +Without specifying a `stringizer`/`destringizer`, the code is capable of +writing `int`/`float`/`str`/`dict`/`list` data as required by the GML +specification. For writing other data types, and for reading data other +than `str` you need to explicitly supply a `stringizer`/`destringizer`. + +For additional documentation on the GML file format, please see the +`GML website `_. + +Several example graphs in GML format may be found on Mark Newman's +`Network data page `_. 
+""" +import html.entities as htmlentitydefs +import re +import warnings +from ast import literal_eval +from collections import defaultdict +from enum import Enum +from io import StringIO +from typing import Any, NamedTuple + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import open_file + +__all__ = ["read_gml", "parse_gml", "generate_gml", "write_gml"] + + +def escape(text): + """Use XML character references to escape characters. + + Use XML character references for unprintable or non-ASCII + characters, double quotes and ampersands in a string + """ + + def fixup(m): + ch = m.group(0) + return "&#" + str(ord(ch)) + ";" + + text = re.sub('[^ -~]|[&"]', fixup, text) + return text if isinstance(text, str) else str(text) + + +def unescape(text): + """Replace XML character references with the referenced characters""" + + def fixup(m): + text = m.group(0) + if text[1] == "#": + # Character reference + if text[2] == "x": + code = int(text[3:-1], 16) + else: + code = int(text[2:-1]) + else: + # Named entity + try: + code = htmlentitydefs.name2codepoint[text[1:-1]] + except KeyError: + return text # leave unchanged + try: + return chr(code) + except (ValueError, OverflowError): + return text # leave unchanged + + return re.sub("&(?:[0-9A-Za-z]+|#(?:[0-9]+|x[0-9A-Fa-f]+));", fixup, text) + + +def literal_destringizer(rep): + """Convert a Python literal to the value it represents. + + Parameters + ---------- + rep : string + A Python literal. + + Returns + ------- + value : object + The value of the Python literal. + + Raises + ------ + ValueError + If `rep` is not a Python literal. + """ + if isinstance(rep, str): + orig_rep = rep + try: + return literal_eval(rep) + except SyntaxError as err: + raise ValueError(f"{orig_rep!r} is not a valid Python literal") from err + else: + raise ValueError(f"{rep!r} is not a string") + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_gml(path, label="label", destringizer=None): + """Read graph in GML format from `path`. + + Parameters + ---------- + path : filename or filehandle + The filename or filehandle to read from. + + label : string, optional + If not None, the parsed nodes will be renamed according to node + attributes indicated by `label`. Default value: 'label'. + + destringizer : callable, optional + A `destringizer` that recovers values stored as strings in GML. If it + cannot convert a string to a value, a `ValueError` is raised. Default + value : None. + + Returns + ------- + G : NetworkX graph + The parsed graph. + + Raises + ------ + NetworkXError + If the input cannot be parsed. + + See Also + -------- + write_gml, parse_gml + literal_destringizer + + Notes + ----- + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
+ + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_gml(G, "test.gml") + + GML values are interpreted as strings by default: + + >>> H = nx.read_gml("test.gml") + >>> H.nodes + NodeView(('0', '1', '2', '3')) + + When a `destringizer` is provided, GML values are converted to the provided type. + For example, integer nodes can be recovered as shown below: + + >>> J = nx.read_gml("test.gml", destringizer=int) + >>> J.nodes + NodeView((0, 1, 2, 3)) + + """ + + def filter_lines(lines): + for line in lines: + try: + line = line.decode("ascii") + except UnicodeDecodeError as err: + raise NetworkXError("input is not ASCII-encoded") from err + if not isinstance(line, str): + lines = str(lines) + if line and line[-1] == "\n": + line = line[:-1] + yield line + + G = parse_gml_lines(filter_lines(path), label, destringizer) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_gml(lines, label="label", destringizer=None): + """Parse GML graph from a string or iterable. + + Parameters + ---------- + lines : string or iterable of strings + Data in GML format. + + label : string, optional + If not None, the parsed nodes will be renamed according to node + attributes indicated by `label`. Default value: 'label'. + + destringizer : callable, optional + A `destringizer` that recovers values stored as strings in GML. If it + cannot convert a string to a value, a `ValueError` is raised. Default + value : None. + + Returns + ------- + G : NetworkX graph + The parsed graph. + + Raises + ------ + NetworkXError + If the input cannot be parsed. + + See Also + -------- + write_gml, read_gml + + Notes + ----- + This stores nested GML attributes as dictionaries in the NetworkX graph, + node, and edge attribute structures. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
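+
+    Examples
+    --------
+    A minimal sketch parsing GML directly from a string:
+
+    >>> data = 'graph [ node [ id 0 label "a" ] node [ id 1 label "b" ] edge [ source 0 target 1 ] ]'
+    >>> G = nx.parse_gml(data)
+    >>> sorted(G.nodes)
+    ['a', 'b']
+    >>> list(G.edges)
+    [('a', 'b')]
+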
+ """ + + def decode_line(line): + if isinstance(line, bytes): + try: + line.decode("ascii") + except UnicodeDecodeError as err: + raise NetworkXError("input is not ASCII-encoded") from err + if not isinstance(line, str): + line = str(line) + return line + + def filter_lines(lines): + if isinstance(lines, str): + lines = decode_line(lines) + lines = lines.splitlines() + yield from lines + else: + for line in lines: + line = decode_line(line) + if line and line[-1] == "\n": + line = line[:-1] + if line.find("\n") != -1: + raise NetworkXError("input line contains newline") + yield line + + G = parse_gml_lines(filter_lines(lines), label, destringizer) + return G + + +class Pattern(Enum): + """encodes the index of each token-matching pattern in `tokenize`.""" + + KEYS = 0 + REALS = 1 + INTS = 2 + STRINGS = 3 + DICT_START = 4 + DICT_END = 5 + COMMENT_WHITESPACE = 6 + + +class Token(NamedTuple): + category: Pattern + value: Any + line: int + position: int + + +LIST_START_VALUE = "_networkx_list_start" + + +def parse_gml_lines(lines, label, destringizer): + """Parse GML `lines` into a graph.""" + + def tokenize(): + patterns = [ + r"[A-Za-z][0-9A-Za-z_]*\b", # keys + # reals + r"[+-]?(?:[0-9]*\.[0-9]+|[0-9]+\.[0-9]*|INF)(?:[Ee][+-]?[0-9]+)?", + r"[+-]?[0-9]+", # ints + r'".*?"', # strings + r"\[", # dict start + r"\]", # dict end + r"#.*$|\s+", # comments and whitespaces + ] + tokens = re.compile("|".join(f"({pattern})" for pattern in patterns)) + lineno = 0 + multilines = [] # entries spread across multiple lines + for line in lines: + pos = 0 + + # deal with entries spread across multiple lines + # + # should we actually have to deal with escaped "s then do it here + if multilines: + multilines.append(line.strip()) + if line[-1] == '"': # closing multiline entry + # multiline entries will be joined by space. cannot + # reintroduce newlines as this will break the tokenizer + line = " ".join(multilines) + multilines = [] + else: # continued multiline entry + lineno += 1 + continue + else: + if line.count('"') == 1: # opening multiline entry + if line.strip()[0] != '"' and line.strip()[-1] != '"': + # since we expect something like key "value", the " should not be found at ends + # otherwise tokenizer will pick up the formatting mistake. 
+ multilines = [line.rstrip()] + lineno += 1 + continue + + length = len(line) + + while pos < length: + match = tokens.match(line, pos) + if match is None: + m = f"cannot tokenize {line[pos:]} at ({lineno + 1}, {pos + 1})" + raise NetworkXError(m) + for i in range(len(patterns)): + group = match.group(i + 1) + if group is not None: + if i == 0: # keys + value = group.rstrip() + elif i == 1: # reals + value = float(group) + elif i == 2: # ints + value = int(group) + else: + value = group + if i != 6: # comments and whitespaces + yield Token(Pattern(i), value, lineno + 1, pos + 1) + pos += len(group) + break + lineno += 1 + yield Token(None, None, lineno + 1, 1) # EOF + + def unexpected(curr_token, expected): + category, value, lineno, pos = curr_token + value = repr(value) if value is not None else "EOF" + raise NetworkXError(f"expected {expected}, found {value} at ({lineno}, {pos})") + + def consume(curr_token, category, expected): + if curr_token.category == category: + return next(tokens) + unexpected(curr_token, expected) + + def parse_kv(curr_token): + dct = defaultdict(list) + while curr_token.category == Pattern.KEYS: + key = curr_token.value + curr_token = next(tokens) + category = curr_token.category + if category == Pattern.REALS or category == Pattern.INTS: + value = curr_token.value + curr_token = next(tokens) + elif category == Pattern.STRINGS: + value = unescape(curr_token.value[1:-1]) + if destringizer: + try: + value = destringizer(value) + except ValueError: + pass + # Special handling for empty lists and tuples + if value == "()": + value = () + if value == "[]": + value = [] + curr_token = next(tokens) + elif category == Pattern.DICT_START: + curr_token, value = parse_dict(curr_token) + else: + # Allow for string convertible id and label values + if key in ("id", "label", "source", "target"): + try: + # String convert the token value + value = unescape(str(curr_token.value)) + if destringizer: + try: + value = destringizer(value) + except ValueError: + pass + curr_token = next(tokens) + except Exception: + msg = ( + "an int, float, string, '[' or string" + + " convertible ASCII value for node id or label" + ) + unexpected(curr_token, msg) + # Special handling for nan and infinity. Since the gml language + # defines unquoted strings as keys, the numeric and string branches + # are skipped and we end up in this special branch, so we need to + # convert the current token value to a float for NAN and plain INF. + # +/-INF are handled in the pattern for 'reals' in tokenize(). This + # allows labels and values to be nan or infinity, but not keys. 
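+                # e.g. a line such as ``weight NAN`` (as written by generate_gml
+                # for a nan-valued attribute) lands in this branch and is stored
+                # as float("nan").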
+ elif curr_token.value in {"NAN", "INF"}: + value = float(curr_token.value) + curr_token = next(tokens) + else: # Otherwise error out + unexpected(curr_token, "an int, float, string or '['") + dct[key].append(value) + + def clean_dict_value(value): + if not isinstance(value, list): + return value + if len(value) == 1: + return value[0] + if value[0] == LIST_START_VALUE: + return value[1:] + return value + + dct = {key: clean_dict_value(value) for key, value in dct.items()} + return curr_token, dct + + def parse_dict(curr_token): + # dict start + curr_token = consume(curr_token, Pattern.DICT_START, "'['") + # dict contents + curr_token, dct = parse_kv(curr_token) + # dict end + curr_token = consume(curr_token, Pattern.DICT_END, "']'") + return curr_token, dct + + def parse_graph(): + curr_token, dct = parse_kv(next(tokens)) + if curr_token.category is not None: # EOF + unexpected(curr_token, "EOF") + if "graph" not in dct: + raise NetworkXError("input contains no graph") + graph = dct["graph"] + if isinstance(graph, list): + raise NetworkXError("input contains more than one graph") + return graph + + tokens = tokenize() + graph = parse_graph() + + directed = graph.pop("directed", False) + multigraph = graph.pop("multigraph", False) + if not multigraph: + G = nx.DiGraph() if directed else nx.Graph() + else: + G = nx.MultiDiGraph() if directed else nx.MultiGraph() + graph_attr = {k: v for k, v in graph.items() if k not in ("node", "edge")} + G.graph.update(graph_attr) + + def pop_attr(dct, category, attr, i): + try: + return dct.pop(attr) + except KeyError as err: + raise NetworkXError(f"{category} #{i} has no {attr!r} attribute") from err + + nodes = graph.get("node", []) + mapping = {} + node_labels = set() + for i, node in enumerate(nodes if isinstance(nodes, list) else [nodes]): + id = pop_attr(node, "node", "id", i) + if id in G: + raise NetworkXError(f"node id {id!r} is duplicated") + if label is not None and label != "id": + node_label = pop_attr(node, "node", label, i) + if node_label in node_labels: + raise NetworkXError(f"node label {node_label!r} is duplicated") + node_labels.add(node_label) + mapping[id] = node_label + G.add_node(id, **node) + + edges = graph.get("edge", []) + for i, edge in enumerate(edges if isinstance(edges, list) else [edges]): + source = pop_attr(edge, "edge", "source", i) + target = pop_attr(edge, "edge", "target", i) + if source not in G: + raise NetworkXError(f"edge #{i} has undefined source {source!r}") + if target not in G: + raise NetworkXError(f"edge #{i} has undefined target {target!r}") + if not multigraph: + if not G.has_edge(source, target): + G.add_edge(source, target, **edge) + else: + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}) is duplicated" + raise nx.NetworkXError(msg) + else: + key = edge.pop("key", None) + if key is not None and G.has_edge(source, target, key): + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}, {key!r})" + msg2 = 'Hint: If multigraph add "multigraph 1" to file header.' + raise nx.NetworkXError(msg + " is duplicated\n" + msg2) + G.add_edge(source, target, key, **edge) + + if label is not None and label != "id": + G = nx.relabel_nodes(G, mapping) + return G + + +def literal_stringizer(value): + """Convert a `value` to a Python literal in GML representation. + + Parameters + ---------- + value : object + The `value` to be converted to GML representation. + + Returns + ------- + rep : string + A double-quoted Python literal representing value. 
Unprintable + characters are replaced by XML character references. + + Raises + ------ + ValueError + If `value` cannot be converted to GML. + + Notes + ----- + The original value can be recovered using the + :func:`networkx.readwrite.gml.literal_destringizer` function. + """ + + def stringize(value): + if isinstance(value, int | bool) or value is None: + if value is True: # GML uses 1/0 for boolean values. + buf.write(str(1)) + elif value is False: + buf.write(str(0)) + else: + buf.write(str(value)) + elif isinstance(value, str): + text = repr(value) + if text[0] != "u": + try: + value.encode("latin1") + except UnicodeEncodeError: + text = "u" + text + buf.write(text) + elif isinstance(value, float | complex | str | bytes): + buf.write(repr(value)) + elif isinstance(value, list): + buf.write("[") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write("]") + elif isinstance(value, tuple): + if len(value) > 1: + buf.write("(") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write(")") + elif value: + buf.write("(") + stringize(value[0]) + buf.write(",)") + else: + buf.write("()") + elif isinstance(value, dict): + buf.write("{") + first = True + for key, value in value.items(): + if not first: + buf.write(",") + else: + first = False + stringize(key) + buf.write(":") + stringize(value) + buf.write("}") + elif isinstance(value, set): + buf.write("{") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write("}") + else: + msg = f"{value!r} cannot be converted into a Python literal" + raise ValueError(msg) + + buf = StringIO() + stringize(value) + return buf.getvalue() + + +def generate_gml(G, stringizer=None): + r"""Generate a single entry of the graph `G` in GML format. + + Parameters + ---------- + G : NetworkX graph + The graph to be converted to GML. + + stringizer : callable, optional + A `stringizer` which converts non-int/non-float/non-dict values into + strings. If it cannot convert a value into a string, it should raise a + `ValueError` to indicate that. Default value: None. + + Returns + ------- + lines: generator of strings + Lines of GML data. Newlines are not appended. + + Raises + ------ + NetworkXError + If `stringizer` cannot convert a value into a string, or the value to + convert is not a string while `stringizer` is None. + + See Also + -------- + literal_stringizer + + Notes + ----- + Graph attributes named 'directed', 'multigraph', 'node' or + 'edge', node attributes named 'id' or 'label', edge attributes + named 'source' or 'target' (or 'key' if `G` is a multigraph) + are ignored because these attribute names are used to encode the graph + structure. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
+ + Examples + -------- + >>> G = nx.Graph() + >>> G.add_node("1") + >>> print("\n".join(nx.generate_gml(G))) + graph [ + node [ + id 0 + label "1" + ] + ] + >>> G = nx.MultiGraph([("a", "b"), ("a", "b")]) + >>> print("\n".join(nx.generate_gml(G))) + graph [ + multigraph 1 + node [ + id 0 + label "a" + ] + node [ + id 1 + label "b" + ] + edge [ + source 0 + target 1 + key 0 + ] + edge [ + source 0 + target 1 + key 1 + ] + ] + """ + valid_keys = re.compile("^[A-Za-z][0-9A-Za-z_]*$") + + def stringize(key, value, ignored_keys, indent, in_list=False): + if not isinstance(key, str): + raise NetworkXError(f"{key!r} is not a string") + if not valid_keys.match(key): + raise NetworkXError(f"{key!r} is not a valid key") + if not isinstance(key, str): + key = str(key) + if key not in ignored_keys: + if isinstance(value, int | bool): + if key == "label": + yield indent + key + ' "' + str(value) + '"' + elif value is True: + # python bool is an instance of int + yield indent + key + " 1" + elif value is False: + yield indent + key + " 0" + # GML only supports signed 32-bit integers + elif value < -(2**31) or value >= 2**31: + yield indent + key + ' "' + str(value) + '"' + else: + yield indent + key + " " + str(value) + elif isinstance(value, float): + text = repr(value).upper() + # GML matches INF to keys, so prepend + to INF. Use repr(float(*)) + # instead of string literal to future proof against changes to repr. + if text == repr(float("inf")).upper(): + text = "+" + text + else: + # GML requires that a real literal contain a decimal point, but + # repr may not output a decimal point when the mantissa is + # integral and hence needs fixing. + epos = text.rfind("E") + if epos != -1 and text.find(".", 0, epos) == -1: + text = text[:epos] + "." + text[epos:] + if key == "label": + yield indent + key + ' "' + text + '"' + else: + yield indent + key + " " + text + elif isinstance(value, dict): + yield indent + key + " [" + next_indent = indent + " " + for key, value in value.items(): + yield from stringize(key, value, (), next_indent) + yield indent + "]" + elif isinstance(value, tuple) and key == "label": + yield indent + key + f" \"({','.join(repr(v) for v in value)})\"" + elif isinstance(value, list | tuple) and key != "label" and not in_list: + if len(value) == 0: + yield indent + key + " " + f'"{value!r}"' + if len(value) == 1: + yield indent + key + " " + f'"{LIST_START_VALUE}"' + for val in value: + yield from stringize(key, val, (), indent, True) + else: + if stringizer: + try: + value = stringizer(value) + except ValueError as err: + raise NetworkXError( + f"{value!r} cannot be converted into a string" + ) from err + if not isinstance(value, str): + raise NetworkXError(f"{value!r} is not a string") + yield indent + key + ' "' + escape(value) + '"' + + multigraph = G.is_multigraph() + yield "graph [" + + # Output graph attributes + if G.is_directed(): + yield " directed 1" + if multigraph: + yield " multigraph 1" + ignored_keys = {"directed", "multigraph", "node", "edge"} + for attr, value in G.graph.items(): + yield from stringize(attr, value, ignored_keys, " ") + + # Output node data + node_id = dict(zip(G, range(len(G)))) + ignored_keys = {"id", "label"} + for node, attrs in G.nodes.items(): + yield " node [" + yield " id " + str(node_id[node]) + yield from stringize("label", node, (), " ") + for attr, value in attrs.items(): + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" + + # Output edge data + ignored_keys = {"source", "target"} + kwargs = {"data": True} + if 
multigraph: + ignored_keys.add("key") + kwargs["keys"] = True + for e in G.edges(**kwargs): + yield " edge [" + yield " source " + str(node_id[e[0]]) + yield " target " + str(node_id[e[1]]) + if multigraph: + yield from stringize("key", e[2], (), " ") + for attr, value in e[-1].items(): + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" + yield "]" + + +@open_file(1, mode="wb") +def write_gml(G, path, stringizer=None): + """Write a graph `G` in GML format to the file or file handle `path`. + + Parameters + ---------- + G : NetworkX graph + The graph to be converted to GML. + + path : filename or filehandle + The filename or filehandle to write. Files whose names end with .gz or + .bz2 will be compressed. + + stringizer : callable, optional + A `stringizer` which converts non-int/non-float/non-dict values into + strings. If it cannot convert a value into a string, it should raise a + `ValueError` to indicate that. Default value: None. + + Raises + ------ + NetworkXError + If `stringizer` cannot convert a value into a string, or the value to + convert is not a string while `stringizer` is None. + + See Also + -------- + read_gml, generate_gml + literal_stringizer + + Notes + ----- + Graph attributes named 'directed', 'multigraph', 'node' or + 'edge', node attributes named 'id' or 'label', edge attributes + named 'source' or 'target' (or 'key' if `G` is a multigraph) + are ignored because these attribute names are used to encode the graph + structure. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + Note that while we allow non-standard GML to be read from a file, we make + sure to write GML format. In particular, underscores are not allowed in + attribute names. + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_gml(G, "test.gml") + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_gml(G, "test.gml.gz") + """ + for line in generate_gml(G, stringizer): + path.write((line + "\n").encode("ascii")) diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/graph6.py b/venv/lib/python3.10/site-packages/networkx/readwrite/graph6.py new file mode 100644 index 0000000000000000000000000000000000000000..5e2a30aa7918f760abc0a5c868a04ae3a793a4d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/graph6.py @@ -0,0 +1,416 @@ +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. +"""Functions for reading and writing graphs in the *graph6* format. + +The *graph6* file format is suitable for small graphs or large dense +graphs. For large sparse graphs, use the *sparse6* format. + +For more information, see the `graph6`_ homepage. + +.. 
_graph6: http://users.cecs.anu.edu.au/~bdm/data/formats.html + +""" +from itertools import islice + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import not_implemented_for, open_file + +__all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] + + +def _generate_graph6_bytes(G, nodes, header): + """Yield bytes in the graph6 encoding of a graph. + + `G` is an undirected simple graph. `nodes` is the list of nodes for + which the node-induced subgraph will be encoded; if `nodes` is the + list of all nodes in the graph, the entire graph will be + encoded. `header` is a Boolean that specifies whether to generate + the header ``b'>>graph6<<'`` before the remaining data. + + This function generates `bytes` objects in the following order: + + 1. the header (if requested), + 2. the encoding of the number of nodes, + 3. each character, one-at-a-time, in the encoding of the requested + node-induced subgraph, + 4. a newline character. + + This function raises :exc:`ValueError` if the graph is too large for + the graph6 format (that is, greater than ``2 ** 36`` nodes). + + """ + n = len(G) + if n >= 2**36: + raise ValueError( + "graph6 is only defined if number of nodes is less than 2 ** 36" + ) + if header: + yield b">>graph6<<" + for d in n_to_data(n): + yield str.encode(chr(d + 63)) + # This generates the same as `(v in G[u] for u, v in combinations(G, 2))`, + # but in "column-major" order instead of "row-major" order. + bits = (nodes[j] in G[nodes[i]] for j in range(1, n) for i in range(j)) + chunk = list(islice(bits, 6)) + while chunk: + d = sum(b << 5 - i for i, b in enumerate(chunk)) + yield str.encode(chr(d + 63)) + chunk = list(islice(bits, 6)) + yield b"\n" + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_graph6_bytes(bytes_in): + """Read a simple undirected graph in graph6 format from bytes. + + Parameters + ---------- + bytes_in : bytes + Data in graph6 format, without a trailing newline. + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If bytes_in is unable to be parsed in graph6 format + + ValueError + If any character ``c`` in bytes_in does not satisfy + ``63 <= ord(c) < 127``. + + Examples + -------- + >>> G = nx.from_graph6_bytes(b"A_") + >>> sorted(G.edges()) + [(0, 1)] + + See Also + -------- + read_graph6, write_graph6 + + References + ---------- + .. [1] Graph6 specification + + + """ + + def bits(): + """Returns sequence of individual bits from 6-bit-per-value + list of data values.""" + for d in data: + for i in [5, 4, 3, 2, 1, 0]: + yield (d >> i) & 1 + + if bytes_in.startswith(b">>graph6<<"): + bytes_in = bytes_in[10:] + + data = [c - 63 for c in bytes_in] + if any(c > 63 for c in data): + raise ValueError("each input character must be in range(63, 127)") + + n, data = data_to_n(data) + nd = (n * (n - 1) // 2 + 5) // 6 + if len(data) != nd: + raise NetworkXError( + f"Expected {n * (n - 1) // 2} bits but got {len(data) * 6} in graph6" + ) + + G = nx.Graph() + G.add_nodes_from(range(n)) + for (i, j), b in zip(((i, j) for j in range(1, n) for i in range(j)), bits()): + if b: + G.add_edge(i, j) + + return G + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def to_graph6_bytes(G, nodes=None, header=True): + """Convert a simple undirected graph to bytes in graph6 format. + + Parameters + ---------- + G : Graph (undirected) + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. 
If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>graph6<<' bytes to head of data. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + ValueError + If the graph has at least ``2 ** 36`` nodes; the graph6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + >>> nx.to_graph6_bytes(nx.path_graph(2)) + b'>>graph6< + + """ + if nodes is not None: + G = G.subgraph(nodes) + H = nx.convert_node_labels_to_integers(G) + nodes = sorted(H.nodes()) + return b"".join(_generate_graph6_bytes(H, nodes, header)) + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_graph6(path): + """Read simple undirected graphs in graph6 format from path. + + Parameters + ---------- + path : file or string + File or filename to write. + + Returns + ------- + G : Graph or list of Graphs + If the file contains multiple lines then a list of graphs is returned + + Raises + ------ + NetworkXError + If the string is unable to be parsed in graph6 format + + Examples + -------- + You can read a graph6 file by giving the path to the file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... _ = f.write(b">>graph6<>> list(G.edges()) + [(0, 1)] + + You can also read a graph6 file by giving an open file-like object:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile() as f: + ... _ = f.write(b">>graph6<>> list(G.edges()) + [(0, 1)] + + See Also + -------- + from_graph6_bytes, write_graph6 + + References + ---------- + .. [1] Graph6 specification + + + """ + glist = [] + for line in path: + line = line.strip() + if not len(line): + continue + glist.append(from_graph6_bytes(line)) + if len(glist) == 1: + return glist[0] + else: + return glist + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@open_file(1, mode="wb") +def write_graph6(G, path, nodes=None, header=True): + """Write a simple undirected graph to a path in graph6 format. + + Parameters + ---------- + G : Graph (undirected) + + path : str + The path naming the file to which to write the graph. + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>graph6<<' string to head of data + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + ValueError + If the graph has at least ``2 ** 36`` nodes; the graph6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + You can write a graph6 file by giving the path to a file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... nx.write_graph6(nx.path_graph(2), f.name) + ... _ = f.seek(0) + ... print(f.read()) + b'>>graph6< + + """ + return write_graph6_file(G, path, nodes=nodes, header=header) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def write_graph6_file(G, f, nodes=None, header=True): + """Write a simple undirected graph to a file-like object in graph6 format. + + Parameters + ---------- + G : Graph (undirected) + + f : file-like object + The file to write. + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>graph6<<' string to head of data + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. 
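# --- Editorial sketch (not part of the vendored diff): file-level round trip
# through write_graph6/read_graph6; the temporary file is illustrative only.
import os
import tempfile

import networkx as nx

fd, fname = tempfile.mkstemp(suffix=".g6")
os.close(fd)
nx.write_graph6(nx.path_graph(4), fname)   # writes ">>graph6<<" header + data
G = nx.read_graph6(fname)                  # single line -> single Graph
assert sorted(G.edges()) == [(0, 1), (1, 2), (2, 3)]
os.remove(fname)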
+ + ValueError + If the graph has at least ``2 ** 36`` nodes; the graph6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + You can write a graph6 file by giving an open file-like object:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile() as f: + ... nx.write_graph6(nx.path_graph(2), f) + ... _ = f.seek(0) + ... print(f.read()) + b'>>graph6< + + """ + if nodes is not None: + G = G.subgraph(nodes) + H = nx.convert_node_labels_to_integers(G) + nodes = sorted(H.nodes()) + for b in _generate_graph6_bytes(H, nodes, header): + f.write(b) + + +def data_to_n(data): + """Read initial one-, four- or eight-unit value from graph6 + integer sequence. + + Return (value, rest of seq.)""" + if data[0] <= 62: + return data[0], data[1:] + if data[1] <= 62: + return (data[1] << 12) + (data[2] << 6) + data[3], data[4:] + return ( + (data[2] << 30) + + (data[3] << 24) + + (data[4] << 18) + + (data[5] << 12) + + (data[6] << 6) + + data[7], + data[8:], + ) + + +def n_to_data(n): + """Convert an integer to one-, four- or eight-unit graph6 sequence. + + This function is undefined if `n` is not in ``range(2 ** 36)``. + + """ + if n <= 62: + return [n] + elif n <= 258047: + return [63, (n >> 12) & 0x3F, (n >> 6) & 0x3F, n & 0x3F] + else: # if n <= 68719476735: + return [ + 63, + 63, + (n >> 30) & 0x3F, + (n >> 24) & 0x3F, + (n >> 18) & 0x3F, + (n >> 12) & 0x3F, + (n >> 6) & 0x3F, + n & 0x3F, + ] diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/graphml.py b/venv/lib/python3.10/site-packages/networkx/readwrite/graphml.py new file mode 100644 index 0000000000000000000000000000000000000000..0b05e03a3b44ef21b2ffa06d4577413993e60c1d --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/graphml.py @@ -0,0 +1,1052 @@ +""" +******* +GraphML +******* +Read and write graphs in GraphML format. + +.. warning:: + + This parser uses the standard xml library present in Python, which is + insecure - see :external+python:mod:`xml` for additional information. + Only parse GraphML files you trust. + +This implementation does not support mixed graphs (directed and unidirected +edges together), hyperedges, nested graphs, or ports. + +"GraphML is a comprehensive and easy-to-use file format for graphs. It +consists of a language core to describe the structural properties of a +graph and a flexible extension mechanism to add application-specific +data. Its main features include support of + + * directed, undirected, and mixed graphs, + * hypergraphs, + * hierarchical graphs, + * graphical representations, + * references to external data, + * application-specific attribute data, and + * light-weight parsers. + +Unlike many other file formats for graphs, GraphML does not use a +custom syntax. Instead, it is based on XML and hence ideally suited as +a common denominator for all kinds of services generating, archiving, +or processing graphs." + +http://graphml.graphdrawing.org/ + +Format +------ +GraphML is an XML format. See +http://graphml.graphdrawing.org/specification.html for the specification and +http://graphml.graphdrawing.org/primer/graphml-primer.html +for examples. 
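# --- Editorial sketch (not part of the vendored diff): n_to_data/data_to_n
# defined above are mutual inverses across the one-, four- and eight-unit
# size classes of the graph6 node-count encoding.
from networkx.readwrite.graph6 import data_to_n, n_to_data

for n in (0, 62, 63, 258047, 258048, 2**36 - 1):
    value, rest = data_to_n(n_to_data(n))
    assert value == n and rest == []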
+""" +import warnings +from collections import defaultdict + +import networkx as nx +from networkx.utils import open_file + +__all__ = [ + "write_graphml", + "read_graphml", + "generate_graphml", + "write_graphml_xml", + "write_graphml_lxml", + "parse_graphml", + "GraphMLWriter", + "GraphMLReader", +] + + +@open_file(1, mode="wb") +def write_graphml_xml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Write G in GraphML XML format to path + + Parameters + ---------- + G : graph + A networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + infer_numeric_types : boolean + Determine if numeric types should be generalized. + For example, if edges have both int and float 'weight' attributes, + we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_graphml(G, "test.graphml") + + Notes + ----- + This implementation does not support mixed graphs (directed + and unidirected edges together) hyperedges, nested graphs, or ports. + """ + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.add_graph_element(G) + writer.dump(path) + + +@open_file(1, mode="wb") +def write_graphml_lxml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Write G in GraphML XML format to path + + This function uses the LXML framework and should be faster than + the version using the xml library. + + Parameters + ---------- + G : graph + A networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + infer_numeric_types : boolean + Determine if numeric types should be generalized. + For example, if edges have both int and float 'weight' attributes, + we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_graphml_lxml(G, "fourpath.graphml") + + Notes + ----- + This implementation does not support mixed graphs (directed + and unidirected edges together) hyperedges, nested graphs, or ports. 
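# --- Editorial sketch (not part of the vendored diff): using the stdlib-based
# GraphML writer defined above; with infer_numeric_types=True, a 'weight'
# attribute that mixes int and float values is promoted to a floating-point
# type in the emitted key element. The filename is illustrative only.
import networkx as nx

G = nx.path_graph(3)
G.edges[0, 1]["weight"] = 1      # int
G.edges[1, 2]["weight"] = 1.5    # float
nx.write_graphml_xml(G, "mixed.graphml", infer_numeric_types=True)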
+ """ + try: + import lxml.etree as lxmletree + except ImportError: + return write_graphml_xml( + G, + path, + encoding, + prettyprint, + infer_numeric_types, + named_key_ids, + edge_id_from_attribute, + ) + + writer = GraphMLWriterLxml( + path, + graph=G, + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.dump() + + +def generate_graphml( + G, + encoding="utf-8", + prettyprint=True, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Generate GraphML lines for G + + Parameters + ---------- + G : graph + A networkx graph + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> linefeed = chr(10) # linefeed = \n + >>> s = linefeed.join(nx.generate_graphml(G)) + >>> for line in nx.generate_graphml(G): # doctest: +SKIP + ... print(line) + + Notes + ----- + This implementation does not support mixed graphs (directed and unidirected + edges together) hyperedges, nested graphs, or ports. + """ + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.add_graph_element(G) + yield from str(writer).splitlines() + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False): + """Read graph in GraphML format from path. + + Parameters + ---------- + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + + node_type: Python type (default: str) + Convert node ids to this type + + edge_key_type: Python type (default: int) + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. + + Returns + ------- + graph: NetworkX graph + If parallel edges are present or `force_multigraph=True` then + a MultiGraph or MultiDiGraph is returned. Otherwise a Graph/DiGraph. + The returned graph is directed if the file indicates it should be. + + Notes + ----- + Default node and edge attributes are not propagated to each node and edge. + They can be obtained from `G.graph` and applied to node and edge attributes + if desired using something like this: + + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP + >>> for node, data in G.nodes(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP + >>> for u, v, data in G.edges(data=True): # doctest: +SKIP + ... if "color" not in data: + ... 
data["color"] = default_color + + This implementation does not support mixed graphs (directed and unidirected + edges together), hypergraphs, nested graphs, or ports. + + For multigraphs the GraphML edge "id" will be used as the edge + key. If not specified then they "key" attribute will be used. If + there is no "key" attribute a default NetworkX multigraph edge key + will be provided. + + Files with the yEd "yfiles" extension can be read. The type of the node's + shape is preserved in the `shape_type` node attribute. + + yEd compressed files ("file.graphmlz" extension) can be read by renaming + the file to "file.graphml.gz". + + """ + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) + # need to check for multiple graphs + glist = list(reader(path=path)) + if len(glist) == 0: + # If no graph comes back, try looking for an incomplete header + header = b'' + path.seek(0) + old_bytes = path.read() + new_bytes = old_bytes.replace(b"", header) + glist = list(reader(string=new_bytes)) + if len(glist) == 0: + raise nx.NetworkXError("file not successfully read as graphml") + return glist[0] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_graphml( + graphml_string, node_type=str, edge_key_type=int, force_multigraph=False +): + """Read graph in GraphML format from string. + + Parameters + ---------- + graphml_string : string + String containing graphml information + (e.g., contents of a graphml file). + + node_type: Python type (default: str) + Convert node ids to this type + + edge_key_type: Python type (default: int) + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. + + + Returns + ------- + graph: NetworkX graph + If no parallel edges are found a Graph or DiGraph is returned. + Otherwise a MultiGraph or MultiDiGraph is returned. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> linefeed = chr(10) # linefeed = \n + >>> s = linefeed.join(nx.generate_graphml(G)) + >>> H = nx.parse_graphml(s) + + Notes + ----- + Default node and edge attributes are not propagated to each node and edge. + They can be obtained from `G.graph` and applied to node and edge attributes + if desired using something like this: + + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP + >>> for node, data in G.nodes(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP + >>> for u, v, data in G.edges(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + + This implementation does not support mixed graphs (directed and unidirected + edges together), hypergraphs, nested graphs, or ports. + + For multigraphs the GraphML edge "id" will be used as the edge + key. If not specified then they "key" attribute will be used. If + there is no "key" attribute a default NetworkX multigraph edge key + will be provided. 
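# --- Editorial sketch (not part of the vendored diff): writing GraphML and
# reading it back with read_graphml; force_multigraph=True always returns a
# MultiGraph even when no parallel edges are present. Filename illustrative.
import networkx as nx

nx.write_graphml(nx.path_graph(3), "p3.graphml")
H = nx.read_graphml("p3.graphml", node_type=int, force_multigraph=True)
assert H.is_multigraph()
assert sorted(H.edges()) == [(0, 1), (1, 2)]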
+ + """ + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) + # need to check for multiple graphs + glist = list(reader(string=graphml_string)) + if len(glist) == 0: + # If no graph comes back, try looking for an incomplete header + header = '' + new_string = graphml_string.replace("", header) + glist = list(reader(string=new_string)) + if len(glist) == 0: + raise nx.NetworkXError("file not successfully read as graphml") + return glist[0] + + +class GraphML: + NS_GRAPHML = "http://graphml.graphdrawing.org/xmlns" + NS_XSI = "http://www.w3.org/2001/XMLSchema-instance" + # xmlns:y="http://www.yworks.com/xml/graphml" + NS_Y = "http://www.yworks.com/xml/graphml" + SCHEMALOCATION = " ".join( + [ + "http://graphml.graphdrawing.org/xmlns", + "http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd", + ] + ) + + def construct_types(self): + types = [ + (int, "integer"), # for Gephi GraphML bug + (str, "yfiles"), + (str, "string"), + (int, "int"), + (int, "long"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + ] + + # These additions to types allow writing numpy types + try: + import numpy as np + except: + pass + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types + + self.xml_type = dict(types) + self.python_type = dict(reversed(a) for a in types) + + # This page says that data types in GraphML follow Java(TM). + # http://graphml.graphdrawing.org/primer/graphml-primer.html#AttributesDefinition + # true and false are the only boolean literals: + # http://en.wikibooks.org/wiki/Java_Programming/Literals#Boolean_Literals + convert_bool = { + # We use data.lower() in actual use. + "true": True, + "false": False, + # Include integer strings for convenience. + "0": False, + 0: False, + "1": True, + 1: True, + } + + def get_xml_type(self, key): + """Wrapper around the xml_type dict that raises a more informative + exception message when a user attempts to use data of a type not + supported by GraphML.""" + try: + return self.xml_type[key] + except KeyError as err: + raise TypeError( + f"GraphML does not support type {key} as data values." 
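# --- Editorial sketch (not part of the vendored diff): generate_graphml and
# parse_graphml compose into an in-memory round trip, no file required.
import networkx as nx

G = nx.path_graph(4)
s = "\n".join(nx.generate_graphml(G))
H = nx.parse_graphml(s, node_type=int)
assert sorted(H.edges()) == sorted(G.edges())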
+ ) from err + + +class GraphMLWriter(GraphML): + def __init__( + self, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, + ): + self.construct_types() + from xml.etree.ElementTree import Element + + self.myElement = Element + + self.infer_numeric_types = infer_numeric_types + self.prettyprint = prettyprint + self.named_key_ids = named_key_ids + self.edge_id_from_attribute = edge_id_from_attribute + self.encoding = encoding + self.xml = self.myElement( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) + self.keys = {} + self.attributes = defaultdict(list) + self.attribute_types = defaultdict(set) + + if graph is not None: + self.add_graph_element(graph) + + def __str__(self): + from xml.etree.ElementTree import tostring + + if self.prettyprint: + self.indent(self.xml) + s = tostring(self.xml).decode(self.encoding) + return s + + def attr_type(self, name, scope, value): + """Infer the attribute type of data named name. Currently this only + supports inference of numeric types. + + If self.infer_numeric_types is false, type is used. Otherwise, pick the + most general of types found across all values with name and scope. This + means edges with data named 'weight' are treated separately from nodes + with data named 'weight'. + """ + if self.infer_numeric_types: + types = self.attribute_types[(name, scope)] + + if len(types) > 1: + types = {self.get_xml_type(t) for t in types} + if "string" in types: + return str + elif "float" in types or "double" in types: + return float + else: + return int + else: + return list(types)[0] + else: + return type(value) + + def get_key(self, name, attr_type, scope, default): + keys_key = (name, attr_type, scope) + try: + return self.keys[keys_key] + except KeyError: + if self.named_key_ids: + new_id = name + else: + new_id = f"d{len(list(self.keys))}" + + self.keys[keys_key] = new_id + key_kwargs = { + "id": new_id, + "for": scope, + "attr.name": name, + "attr.type": attr_type, + } + key_element = self.myElement("key", **key_kwargs) + # add subelement for data default value if present + if default is not None: + default_element = self.myElement("default") + default_element.text = str(default) + key_element.append(default_element) + self.xml.insert(0, key_element) + return new_id + + def add_data(self, name, element_type, value, scope="all", default=None): + """ + Make a data element for an edge or a node. Keep a log of the + type in the keys table. + """ + if element_type not in self.xml_type: + raise nx.NetworkXError( + f"GraphML writer does not support {element_type} as data values." + ) + keyid = self.get_key(name, self.get_xml_type(element_type), scope, default) + data_element = self.myElement("data", key=keyid) + data_element.text = str(value) + return data_element + + def add_attributes(self, scope, xml_obj, data, default): + """Appends attribute data to edges or nodes, and stores type information + to be added later. See add_graph_element. 
+ """ + for k, v in data.items(): + self.attribute_types[(str(k), scope)].add(type(v)) + self.attributes[xml_obj].append([k, v, scope, default.get(k)]) + + def add_nodes(self, G, graph_element): + default = G.graph.get("node_default", {}) + for node, data in G.nodes(data=True): + node_element = self.myElement("node", id=str(node)) + self.add_attributes("node", node_element, data, default) + graph_element.append(node_element) + + def add_edges(self, G, graph_element): + if G.is_multigraph(): + for u, v, key, data in G.edges(data=True, keys=True): + edge_element = self.myElement( + "edge", + source=str(u), + target=str(v), + id=str(data.get(self.edge_id_from_attribute)) + if self.edge_id_from_attribute + and self.edge_id_from_attribute in data + else str(key), + ) + default = G.graph.get("edge_default", {}) + self.add_attributes("edge", edge_element, data, default) + graph_element.append(edge_element) + else: + for u, v, data in G.edges(data=True): + if self.edge_id_from_attribute and self.edge_id_from_attribute in data: + # select attribute to be edge id + edge_element = self.myElement( + "edge", + source=str(u), + target=str(v), + id=str(data.get(self.edge_id_from_attribute)), + ) + else: + # default: no edge id + edge_element = self.myElement("edge", source=str(u), target=str(v)) + default = G.graph.get("edge_default", {}) + self.add_attributes("edge", edge_element, data, default) + graph_element.append(edge_element) + + def add_graph_element(self, G): + """ + Serialize graph G in GraphML to the stream. + """ + if G.is_directed(): + default_edge_type = "directed" + else: + default_edge_type = "undirected" + + graphid = G.graph.pop("id", None) + if graphid is None: + graph_element = self.myElement("graph", edgedefault=default_edge_type) + else: + graph_element = self.myElement( + "graph", edgedefault=default_edge_type, id=graphid + ) + default = {} + data = { + k: v + for (k, v) in G.graph.items() + if k not in ["node_default", "edge_default"] + } + self.add_attributes("graph", graph_element, data, default) + self.add_nodes(G, graph_element) + self.add_edges(G, graph_element) + + # self.attributes contains a mapping from XML Objects to a list of + # data that needs to be added to them. + # We postpone processing in order to do type inference/generalization. + # See self.attr_type + for xml_obj, data in self.attributes.items(): + for k, v, scope, default in data: + xml_obj.append( + self.add_data( + str(k), self.attr_type(k, scope, v), str(v), scope, default + ) + ) + self.xml.append(graph_element) + + def add_graphs(self, graph_list): + """Add many graphs to this GraphML document.""" + for G in graph_list: + self.add_graph_element(G) + + def dump(self, stream): + from xml.etree.ElementTree import ElementTree + + if self.prettyprint: + self.indent(self.xml) + document = ElementTree(self.xml) + document.write(stream, encoding=self.encoding, xml_declaration=True) + + def indent(self, elem, level=0): + # in-place prettyprint formatter + i = "\n" + level * " " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + self.indent(elem, level + 1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +class IncrementalElement: + """Wrapper for _IncrementalWriter providing an Element like interface. 
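# --- Editorial sketch (not part of the vendored diff): GraphMLWriter can be
# driven directly; str(writer) serializes the accumulated <graphml> element,
# and named_key_ids=True reuses attribute names as key ids.
import networkx as nx
from networkx.readwrite.graphml import GraphMLWriter

writer = GraphMLWriter(prettyprint=True, named_key_ids=True)
writer.add_graph_element(nx.path_graph(2))
xml_text = str(writer)
assert xml_text.startswith("<graphml")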
+ + This wrapper does not intend to be a complete implementation but rather to + deal with those calls used in GraphMLWriter. + """ + + def __init__(self, xml, prettyprint): + self.xml = xml + self.prettyprint = prettyprint + + def append(self, element): + self.xml.write(element, pretty_print=self.prettyprint) + + +class GraphMLWriterLxml(GraphMLWriter): + def __init__( + self, + path, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, + ): + self.construct_types() + import lxml.etree as lxmletree + + self.myElement = lxmletree.Element + + self._encoding = encoding + self._prettyprint = prettyprint + self.named_key_ids = named_key_ids + self.edge_id_from_attribute = edge_id_from_attribute + self.infer_numeric_types = infer_numeric_types + + self._xml_base = lxmletree.xmlfile(path, encoding=encoding) + self._xml = self._xml_base.__enter__() + self._xml.write_declaration() + + # We need to have a xml variable that support insertion. This call is + # used for adding the keys to the document. + # We will store those keys in a plain list, and then after the graph + # element is closed we will add them to the main graphml element. + self.xml = [] + self._keys = self.xml + self._graphml = self._xml.element( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) + self._graphml.__enter__() + self.keys = {} + self.attribute_types = defaultdict(set) + + if graph is not None: + self.add_graph_element(graph) + + def add_graph_element(self, G): + """ + Serialize graph G in GraphML to the stream. + """ + if G.is_directed(): + default_edge_type = "directed" + else: + default_edge_type = "undirected" + + graphid = G.graph.pop("id", None) + if graphid is None: + graph_element = self._xml.element("graph", edgedefault=default_edge_type) + else: + graph_element = self._xml.element( + "graph", edgedefault=default_edge_type, id=graphid + ) + + # gather attributes types for the whole graph + # to find the most general numeric format needed. + # Then pass through attributes to create key_id for each. 
+ graphdata = { + k: v + for k, v in G.graph.items() + if k not in ("node_default", "edge_default") + } + node_default = G.graph.get("node_default", {}) + edge_default = G.graph.get("edge_default", {}) + # Graph attributes + for k, v in graphdata.items(): + self.attribute_types[(str(k), "graph")].add(type(v)) + for k, v in graphdata.items(): + element_type = self.get_xml_type(self.attr_type(k, "graph", v)) + self.get_key(str(k), element_type, "graph", None) + # Nodes and data + for node, d in G.nodes(data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "node")].add(type(v)) + for node, d in G.nodes(data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "node", v)) + self.get_key(str(k), T, "node", node_default.get(k)) + # Edges and data + if G.is_multigraph(): + for u, v, ekey, d in G.edges(keys=True, data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, ekey, d in G.edges(keys=True, data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "edge", v)) + self.get_key(str(k), T, "edge", edge_default.get(k)) + else: + for u, v, d in G.edges(data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, d in G.edges(data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "edge", v)) + self.get_key(str(k), T, "edge", edge_default.get(k)) + + # Now add attribute keys to the xml file + for key in self.xml: + self._xml.write(key, pretty_print=self._prettyprint) + + # The incremental_writer writes each node/edge as it is created + incremental_writer = IncrementalElement(self._xml, self._prettyprint) + with graph_element: + self.add_attributes("graph", incremental_writer, graphdata, {}) + self.add_nodes(G, incremental_writer) # adds attributes too + self.add_edges(G, incremental_writer) # adds attributes too + + def add_attributes(self, scope, xml_obj, data, default): + """Appends attribute data.""" + for k, v in data.items(): + data_element = self.add_data( + str(k), self.attr_type(str(k), scope, v), str(v), scope, default.get(k) + ) + xml_obj.append(data_element) + + def __str__(self): + return object.__str__(self) + + def dump(self, stream=None): + self._graphml.__exit__(None, None, None) + self._xml_base.__exit__(None, None, None) + + +# default is lxml is present. +write_graphml = write_graphml_lxml + + +class GraphMLReader(GraphML): + """Read a GraphML document. 
Produces NetworkX graph objects.""" + + def __init__(self, node_type=str, edge_key_type=int, force_multigraph=False): + self.construct_types() + self.node_type = node_type + self.edge_key_type = edge_key_type + self.multigraph = force_multigraph # If False, test for multiedges + self.edge_ids = {} # dict mapping (u,v) tuples to edge id attributes + + def __call__(self, path=None, string=None): + from xml.etree.ElementTree import ElementTree, fromstring + + if path is not None: + self.xml = ElementTree(file=path) + elif string is not None: + self.xml = fromstring(string) + else: + raise ValueError("Must specify either 'path' or 'string' as kwarg") + (keys, defaults) = self.find_graphml_keys(self.xml) + for g in self.xml.findall(f"{{{self.NS_GRAPHML}}}graph"): + yield self.make_graph(g, keys, defaults) + + def make_graph(self, graph_xml, graphml_keys, defaults, G=None): + # set default graph type + edgedefault = graph_xml.get("edgedefault", None) + if G is None: + if edgedefault == "directed": + G = nx.MultiDiGraph() + else: + G = nx.MultiGraph() + # set defaults for graph attributes + G.graph["node_default"] = {} + G.graph["edge_default"] = {} + for key_id, value in defaults.items(): + key_for = graphml_keys[key_id]["for"] + name = graphml_keys[key_id]["name"] + python_type = graphml_keys[key_id]["type"] + if key_for == "node": + G.graph["node_default"].update({name: python_type(value)}) + if key_for == "edge": + G.graph["edge_default"].update({name: python_type(value)}) + # hyperedges are not supported + hyperedge = graph_xml.find(f"{{{self.NS_GRAPHML}}}hyperedge") + if hyperedge is not None: + raise nx.NetworkXError("GraphML reader doesn't support hyperedges") + # add nodes + for node_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}node"): + self.add_node(G, node_xml, graphml_keys, defaults) + # add edges + for edge_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}edge"): + self.add_edge(G, edge_xml, graphml_keys) + # add graph data + data = self.decode_data_elements(graphml_keys, graph_xml) + G.graph.update(data) + + # switch to Graph or DiGraph if no parallel edges were found + if self.multigraph: + return G + + G = nx.DiGraph(G) if G.is_directed() else nx.Graph(G) + # add explicit edge "id" from file as attribute in NX graph. + nx.set_edge_attributes(G, values=self.edge_ids, name="id") + return G + + def add_node(self, G, node_xml, graphml_keys, defaults): + """Add a node to the graph.""" + # warn on finding unsupported ports tag + ports = node_xml.find(f"{{{self.NS_GRAPHML}}}port") + if ports is not None: + warnings.warn("GraphML port tag not supported.") + # find the node by id and cast it to the appropriate type + node_id = self.node_type(node_xml.get("id")) + # get data/attributes for node + data = self.decode_data_elements(graphml_keys, node_xml) + G.add_node(node_id, **data) + # get child nodes + if node_xml.attrib.get("yfiles.foldertype") == "group": + graph_xml = node_xml.find(f"{{{self.NS_GRAPHML}}}graph") + self.make_graph(graph_xml, graphml_keys, defaults, G) + + def add_edge(self, G, edge_element, graphml_keys): + """Add an edge to the graph.""" + # warn on finding unsupported ports tag + ports = edge_element.find(f"{{{self.NS_GRAPHML}}}port") + if ports is not None: + warnings.warn("GraphML port tag not supported.") + + # raise error if we find mixed directed and undirected edges + directed = edge_element.get("directed") + if G.is_directed() and directed == "false": + msg = "directed=false edge found in directed graph." 
+ raise nx.NetworkXError(msg) + if (not G.is_directed()) and directed == "true": + msg = "directed=true edge found in undirected graph." + raise nx.NetworkXError(msg) + + source = self.node_type(edge_element.get("source")) + target = self.node_type(edge_element.get("target")) + data = self.decode_data_elements(graphml_keys, edge_element) + # GraphML stores edge ids as an attribute + # NetworkX uses them as keys in multigraphs too if no key + # attribute is specified + edge_id = edge_element.get("id") + if edge_id: + # self.edge_ids is used by `make_graph` method for non-multigraphs + self.edge_ids[source, target] = edge_id + try: + edge_id = self.edge_key_type(edge_id) + except ValueError: # Could not convert. + pass + else: + edge_id = data.get("key") + + if G.has_edge(source, target): + # mark this as a multigraph + self.multigraph = True + + # Use add_edges_from to avoid error with add_edge when `'key' in data` + # Note there is only one edge here... + G.add_edges_from([(source, target, edge_id, data)]) + + def decode_data_elements(self, graphml_keys, obj_xml): + """Use the key information to decode the data XML if present.""" + data = {} + for data_element in obj_xml.findall(f"{{{self.NS_GRAPHML}}}data"): + key = data_element.get("key") + try: + data_name = graphml_keys[key]["name"] + data_type = graphml_keys[key]["type"] + except KeyError as err: + raise nx.NetworkXError(f"Bad GraphML data: no key {key}") from err + text = data_element.text + # assume anything with subelements is a yfiles extension + if text is not None and len(list(data_element)) == 0: + if data_type == bool: + # Ignore cases. + # http://docs.oracle.com/javase/6/docs/api/java/lang/ + # Boolean.html#parseBoolean%28java.lang.String%29 + data[data_name] = self.convert_bool[text.lower()] + else: + data[data_name] = data_type(text) + elif len(list(data_element)) > 0: + # Assume yfiles as subelements, try to extract node_label + node_label = None + # set GenericNode's configuration as shape type + gn = data_element.find(f"{{{self.NS_Y}}}GenericNode") + if gn is not None: + data["shape_type"] = gn.get("configuration") + for node_type in ["GenericNode", "ShapeNode", "SVGNode", "ImageNode"]: + pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}" + geometry = data_element.find(f"{pref}Geometry") + if geometry is not None: + data["x"] = geometry.get("x") + data["y"] = geometry.get("y") + if node_label is None: + node_label = data_element.find(f"{pref}NodeLabel") + shape = data_element.find(f"{pref}Shape") + if shape is not None: + data["shape_type"] = shape.get("type") + if node_label is not None: + data["label"] = node_label.text + + # check all the different types of edges available in yEd. 
+ for edge_type in [ + "PolyLineEdge", + "SplineEdge", + "QuadCurveEdge", + "BezierEdge", + "ArcEdge", + ]: + pref = f"{{{self.NS_Y}}}{edge_type}/{{{self.NS_Y}}}" + edge_label = data_element.find(f"{pref}EdgeLabel") + if edge_label is not None: + break + if edge_label is not None: + data["label"] = edge_label.text + elif text is None: + data[data_name] = "" + return data + + def find_graphml_keys(self, graph_element): + """Extracts all the keys and key defaults from the xml.""" + graphml_keys = {} + graphml_key_defaults = {} + for k in graph_element.findall(f"{{{self.NS_GRAPHML}}}key"): + attr_id = k.get("id") + attr_type = k.get("attr.type") + attr_name = k.get("attr.name") + yfiles_type = k.get("yfiles.type") + if yfiles_type is not None: + attr_name = yfiles_type + attr_type = "yfiles" + if attr_type is None: + attr_type = "string" + warnings.warn(f"No key type for id {attr_id}. Using string") + if attr_name is None: + raise nx.NetworkXError(f"Unknown key for id {attr_id}.") + graphml_keys[attr_id] = { + "name": attr_name, + "type": self.python_type[attr_type], + "for": k.get("for"), + } + # check for "default" sub-element of key element + default = k.find(f"{{{self.NS_GRAPHML}}}default") + if default is not None: + # Handle default values identically to data element values + python_type = graphml_keys[attr_id]["type"] + if python_type == bool: + graphml_key_defaults[attr_id] = self.convert_bool[ + default.text.lower() + ] + else: + graphml_key_defaults[attr_id] = python_type(default.text) + return graphml_keys, graphml_key_defaults diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/leda.py b/venv/lib/python3.10/site-packages/networkx/readwrite/leda.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb57db140081aa65f1d9f91dbcc3fe29faf7cd5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/leda.py @@ -0,0 +1,108 @@ +""" +Read graphs in LEDA format. + +LEDA is a C++ class library for efficient data types and algorithms. + +Format +------ +See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + +""" +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. + +__all__ = ["read_leda", "parse_leda"] + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import open_file + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_leda(path, encoding="UTF-8"): + """Read graph in LEDA format from path. + + Parameters + ---------- + path : file or string + File or filename to read. Filenames ending in .gz or .bz2 will be + uncompressed. + + Returns + ------- + G : NetworkX graph + + Examples + -------- + G=nx.read_leda('file.leda') + + References + ---------- + .. [1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + """ + lines = (line.decode(encoding) for line in path) + G = parse_leda(lines) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_leda(lines): + """Read graph in LEDA format from string or iterable. + + Parameters + ---------- + lines : string or iterable + Data in LEDA format. + + Returns + ------- + G : NetworkX graph + + Examples + -------- + G=nx.parse_leda(string) + + References + ---------- + .. 
[1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + """ + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter( + [ + line.rstrip("\n") + for line in lines + if not (line.startswith(("#", "\n")) or line == "") + ] + ) + for i in range(3): + next(lines) + # Graph + du = int(next(lines)) # -1=directed, -2=undirected + if du == -1: + G = nx.DiGraph() + else: + G = nx.Graph() + + # Nodes + n = int(next(lines)) # number of nodes + node = {} + for i in range(1, n + 1): # LEDA counts from 1 to n + symbol = next(lines).rstrip().strip("|{}| ") + if symbol == "": + symbol = str(i) # use int if no label - could be trouble + node[i] = symbol + + G.add_nodes_from([s for i, s in node.items()]) + + # Edges + m = int(next(lines)) # number of edges + for i in range(m): + try: + s, t, reversal, label = next(lines).split() + except BaseException as err: + raise NetworkXError(f"Too few fields in LEDA.GRAPH edge {i+1}") from err + # BEWARE: no handling of reversal edges + G.add_edge(node[int(s)], node[int(t)], label=label[2:-2]) + return G diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/multiline_adjlist.py b/venv/lib/python3.10/site-packages/networkx/readwrite/multiline_adjlist.py new file mode 100644 index 0000000000000000000000000000000000000000..526b30a9b05526c97abb7c2556b435084fb601d7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/multiline_adjlist.py @@ -0,0 +1,393 @@ +""" +************************* +Multi-line Adjacency List +************************* +Read and write NetworkX graphs as multi-line adjacency lists. + +The multi-line adjacency list format is useful for graphs with +nodes that can be meaningfully represented as strings. With this format +simple edge data can be stored but node or graph data is not. + +Format +------ +The first label in a line is the source node label followed by the node degree +d. The next d lines are target node labels and optional edge data. +That pattern repeats for all nodes in the graph. + +The graph with edges a-b, a-c, d-e can be represented as the following +adjacency list (anything following the # in a line is a comment):: + + # example.multiline-adjlist + a 2 + b + c + d 1 + e +""" + +__all__ = [ + "generate_multiline_adjlist", + "write_multiline_adjlist", + "parse_multiline_adjlist", + "read_multiline_adjlist", +] + +import networkx as nx +from networkx.utils import open_file + + +def generate_multiline_adjlist(G, delimiter=" "): + """Generate a single line of the graph G in multiline adjacency list format. + + Parameters + ---------- + G : NetworkX graph + + delimiter : string, optional + Separator for node labels + + Returns + ------- + lines : string + Lines of data in multiline adjlist format. + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> for line in nx.generate_multiline_adjlist(G): + ... 
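# --- Editorial sketch (not part of the vendored diff): a minimal LEDA.GRAPH
# string for parse_leda above: header, two type lines, -1 for directed, the
# node count and |{...}| labels, then the edge count and edge records.
import networkx as nx

leda = """LEDA.GRAPH
string
int
-1
2
|{a}|
|{b}|
1
1 2 0 |{5}|
"""
G = nx.parse_leda(leda)
assert G.is_directed()
assert list(G.edges(data=True)) == [("a", "b", {"label": "5"})]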
print(line) + 0 3 + 1 {} + 2 {} + 3 {} + 1 2 + 2 {} + 3 {} + 2 1 + 3 {} + 3 1 + 4 {} + 4 1 + 5 {} + 5 1 + 6 {} + 6 0 + + See Also + -------- + write_multiline_adjlist, read_multiline_adjlist + """ + if G.is_directed(): + if G.is_multigraph(): + for s, nbrs in G.adjacency(): + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + for key, data in datadict.items() + ] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + else: # directed single edges + for s, nbrs in G.adjacency(): + deg = len(nbrs) + yield str(s) + delimiter + str(deg) + for u, d in nbrs.items(): + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + else: # undirected + if G.is_multigraph(): + seen = set() # helper dict used to avoid duplicate edges + for s, nbrs in G.adjacency(): + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + if u not in seen + for key, data in datadict.items() + ] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + seen.add(s) + else: # undirected single edges + seen = set() # helper dict used to avoid duplicate edges + for s, nbrs in G.adjacency(): + nbr_edges = [(u, d) for u, d in nbrs.items() if u not in seen] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + seen.add(s) + + +@open_file(1, mode="wb") +def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf-8"): + """Write the graph G in multiline adjacency list format to path + + Parameters + ---------- + G : NetworkX graph + + path : string or file + Filename or file handle to write to. + Filenames ending in .gz or .bz2 will be compressed. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels + + encoding : string, optional + Text encoding. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.adjlist") + + The path can be a file handle or a string with the name of the file. If a + file handle is provided, it has to be opened in 'wb' mode. + + >>> fh = open("test.adjlist", "wb") + >>> nx.write_multiline_adjlist(G, fh) + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_multiline_adjlist(G, "test.adjlist.gz") + + See Also + -------- + read_multiline_adjlist + """ + import sys + import time + + pargs = comments + " ".join(sys.argv) + header = ( + f"{pargs}\n" + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) + path.write(header.encode(encoding)) + + for multiline in generate_multiline_adjlist(G, delimiter): + multiline += "\n" + path.write(multiline.encode(encoding)) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_multiline_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None +): + """Parse lines of a multiline adjacency list representation of a graph. + + Parameters + ---------- + lines : list or iterator of strings + Input data in multiline adjlist format + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. 
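# --- Editorial sketch (not part of the vendored diff): streaming a small
# graph through generate_multiline_adjlist; each source line carries the
# number of not-yet-seen neighbors, followed by that many "target data" lines.
import networkx as nx

G = nx.Graph()
G.add_edge("a", "b", weight=3)
G.add_edge("a", "c")
lines = list(nx.generate_multiline_adjlist(G))
assert lines[0] == "a 2"
assert lines[1] == "b {'weight': 3}"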
+ + edgetype : Python type, optional + Convert edges to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in multiline adjacency list format. + + Examples + -------- + >>> lines = [ + ... "1 2", + ... "2 {'weight':3, 'name': 'Frodo'}", + ... "3 {}", + ... "2 1", + ... "5 {'weight':6, 'name': 'Saruman'}", + ... ] + >>> G = nx.parse_multiline_adjlist(iter(lines), nodetype=int) + >>> list(G) + [1, 2, 3, 5] + + """ + from ast import literal_eval + + G = nx.empty_graph(0, create_using) + for line in lines: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not line: + continue + try: + (u, deg) = line.strip().split(delimiter) + deg = int(deg) + except BaseException as err: + raise TypeError(f"Failed to read node and degree on line ({line})") from err + if nodetype is not None: + try: + u = nodetype(u) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({u}) to type {nodetype}" + ) from err + G.add_node(u) + for i in range(deg): + while True: + try: + line = next(lines) + except StopIteration as err: + msg = f"Failed to find neighbor for node ({u})" + raise TypeError(msg) from err + p = line.find(comments) + if p >= 0: + line = line[:p] + if line: + break + vlist = line.strip().split(delimiter) + numb = len(vlist) + if numb < 1: + continue # isolated node + v = vlist.pop(0) + data = "".join(vlist) + if nodetype is not None: + try: + v = nodetype(v) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({v}) to type {nodetype}" + ) from err + if edgetype is not None: + try: + edgedata = {"weight": edgetype(data)} + except BaseException as err: + raise TypeError( + f"Failed to convert edge data ({data}) to type {edgetype}" + ) from err + else: + try: # try to evaluate + edgedata = literal_eval(data) + except: + edgedata = {} + G.add_edge(u, v, **edgedata) + + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_multiline_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + edgetype=None, + encoding="utf-8", +): + """Read graph in multi-line adjacency list format from path. + + Parameters + ---------- + path : string or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be uncompressed. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + edgetype : Python type, optional + Convert edge data to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.adjlist") + >>> G = nx.read_multiline_adjlist("test.adjlist") + + The path can be a file or a string with the name of the file. If a + file s provided, it has to be opened in 'rb' mode. + + >>> fh = open("test.adjlist", "rb") + >>> G = nx.read_multiline_adjlist(fh) + + Filenames ending in .gz or .bz2 will be compressed. 
+ + >>> nx.write_multiline_adjlist(G, "test.adjlist.gz") + >>> G = nx.read_multiline_adjlist("test.adjlist.gz") + + The optional nodetype is a function to convert node strings to nodetype. + + For example + + >>> G = nx.read_multiline_adjlist("test.adjlist", nodetype=int) + + will attempt to convert all nodes to integer type. + + The optional edgetype is a function to convert edge data strings to + edgetype. + + >>> G = nx.read_multiline_adjlist("test.adjlist") + + The optional create_using parameter is a NetworkX graph container. + The default is Graph(), an undirected graph. To read the data as + a directed graph use + + >>> G = nx.read_multiline_adjlist("test.adjlist", create_using=nx.DiGraph) + + Notes + ----- + This format does not store graph, node, or edge data. + + See Also + -------- + write_multiline_adjlist + """ + lines = (line.decode(encoding) for line in path) + return parse_multiline_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + edgetype=edgetype, + ) diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/p2g.py b/venv/lib/python3.10/site-packages/networkx/readwrite/p2g.py new file mode 100644 index 0000000000000000000000000000000000000000..85f07ec84152910f69592c2f811f0022b5be8160 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/p2g.py @@ -0,0 +1,104 @@ +""" +This module provides the following: read and write of p2g format +used in metabolic pathway studies. + +See https://web.archive.org/web/20080626113807/http://www.cs.purdue.edu/homes/koyuturk/pathway/ for a description. + +The summary is included here: + +A file that describes a uniquely labeled graph (with extension ".gr") +format looks like the following: + + +name +3 4 +a +1 2 +b + +c +0 2 + +"name" is simply a description of what the graph corresponds to. The +second line displays the number of nodes and number of edges, +respectively. This sample graph contains three nodes labeled "a", "b", +and "c". The rest of the graph contains two lines for each node. The +first line for a node contains the node label. After the declaration +of the node label, the out-edges of that node in the graph are +provided. For instance, "a" is linked to nodes 1 and 2, which are +labeled "b" and "c", while the node labeled "b" has no outgoing +edges. Observe that node labeled "c" has an outgoing edge to +itself. Indeed, self-loops are allowed. Node index starts from 0. + +""" +import networkx as nx +from networkx.utils import open_file + + +@open_file(1, mode="w") +def write_p2g(G, path, encoding="utf-8"): + """Write NetworkX graph in p2g format. + + Notes + ----- + This format is meant to be used with directed graphs with + possible self loops. + """ + path.write((f"{G.name}\n").encode(encoding)) + path.write((f"{G.order()} {G.size()}\n").encode(encoding)) + nodes = list(G) + # make dictionary mapping nodes to integers + nodenumber = dict(zip(nodes, range(len(nodes)))) + for n in nodes: + path.write((f"{n}\n").encode(encoding)) + for nbr in G.neighbors(n): + path.write((f"{nodenumber[nbr]} ").encode(encoding)) + path.write("\n".encode(encoding)) + + +@open_file(0, mode="r") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_p2g(path, encoding="utf-8"): + """Read graph in p2g format from path. 
+ + Returns + ------- + MultiDiGraph + + Notes + ----- + If you want a DiGraph (with no self loops allowed and no edge data) + use D=nx.DiGraph(read_p2g(path)) + """ + lines = (line.decode(encoding) for line in path) + G = parse_p2g(lines) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_p2g(lines): + """Parse p2g format graph from string or iterable. + + Returns + ------- + MultiDiGraph + """ + description = next(lines).strip() + # are multiedges (parallel edges) allowed? + G = nx.MultiDiGraph(name=description, selfloops=True) + nnodes, nedges = map(int, next(lines).split()) + nodelabel = {} + nbrs = {} + # loop over the nodes keeping track of node labels and out neighbors + # defer adding edges until all node labels are known + for i in range(nnodes): + n = next(lines).strip() + nodelabel[i] = n + G.add_node(n) + nbrs[n] = map(int, next(lines).split()) + # now we know all of the node labels so we can add the edges + # with the correct labels + for n in G: + for nbr in nbrs[n]: + G.add_edge(n, nodelabel[nbr]) + return G diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/pajek.py b/venv/lib/python3.10/site-packages/networkx/readwrite/pajek.py new file mode 100644 index 0000000000000000000000000000000000000000..f148f16208de0d42fbcd52d24affeac98152e1f3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/pajek.py @@ -0,0 +1,286 @@ +""" +***** +Pajek +***** +Read graphs in Pajek format. + +This implementation handles directed and undirected graphs including +those with self loops and parallel edges. + +Format +------ +See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm +for format information. + +""" + +import warnings + +import networkx as nx +from networkx.utils import open_file + +__all__ = ["read_pajek", "parse_pajek", "generate_pajek", "write_pajek"] + + +def generate_pajek(G): + """Generate lines in Pajek graph format. + + Parameters + ---------- + G : graph + A Networkx graph + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + if G.name == "": + name = "NetworkX" + else: + name = G.name + # Apparently many Pajek format readers can't process this line + # So we'll leave it out for now. + # yield '*network %s'%name + + # write nodes with attributes + yield f"*vertices {G.order()}" + nodes = list(G) + # make dictionary mapping nodes to integers + nodenumber = dict(zip(nodes, range(1, len(nodes) + 1))) + for n in nodes: + # copy node attributes and pop mandatory attributes + # to avoid duplication. + na = G.nodes.get(n, {}).copy() + x = na.pop("x", 0.0) + y = na.pop("y", 0.0) + try: + id = int(na.pop("id", nodenumber[n])) + except ValueError as err: + err.args += ( + ( + "Pajek format requires 'id' to be an int()." + " Refer to the 'Relabeling nodes' section." + ), + ) + raise + nodenumber[n] = id + shape = na.pop("shape", "ellipse") + s = " ".join(map(make_qstr, (id, n, x, y, shape))) + # only optional attributes are left in na. + for k, v in na.items(): + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Node attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." 
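# --- Editorial sketch (not part of the vendored diff): the docstring's p2g
# sample parsed with parse_p2g; node indices in the out-edge lines are
# resolved to labels, yielding a MultiDiGraph with one self-loop on "c".
from networkx.readwrite.p2g import parse_p2g

text = iter(["name\n", "3 4\n", "a\n", "1 2\n", "b\n", "\n", "c\n", "0 2\n"])
G = parse_p2g(text)
assert G.name == "name"
assert sorted(G.edges()) == [("a", "b"), ("a", "c"), ("c", "a"), ("c", "c")]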
+ ) + yield s + + # write edges with attributes + if G.is_directed(): + yield "*arcs" + else: + yield "*edges" + for u, v, edgedata in G.edges(data=True): + d = edgedata.copy() + value = d.pop("weight", 1.0) # use 1 as default edge value + s = " ".join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) + for k, v in d.items(): + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Edge attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." + ) + yield s + + +@open_file(1, mode="wb") +def write_pajek(G, path, encoding="UTF-8"): + """Write graph in Pajek format to path. + + Parameters + ---------- + G : graph + A Networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_pajek(G, "test.net") + + Warnings + -------- + Optional node attributes and edge attributes must be non-empty strings. + Otherwise it will not be written into the file. You will need to + convert those attributes to strings if you want to keep them. + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + for line in generate_pajek(G): + line += "\n" + path.write(line.encode(encoding)) + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_pajek(path, encoding="UTF-8"): + """Read graph in Pajek format from path. + + Parameters + ---------- + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be uncompressed. + + Returns + ------- + G : NetworkX MultiGraph or MultiDiGraph. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_pajek(G, "test.net") + >>> G = nx.read_pajek("test.net") + + To create a Graph instead of a MultiGraph use + + >>> G1 = nx.Graph(G) + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + lines = (line.decode(encoding) for line in path) + return parse_pajek(lines) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_pajek(lines): + """Parse Pajek format graph from string or iterable. + + Parameters + ---------- + lines : string or iterable + Data in Pajek format. + + Returns + ------- + G : NetworkX graph + + See Also + -------- + read_pajek + + """ + import shlex + + # multigraph=False + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter([line.rstrip("\n") for line in lines]) + G = nx.MultiDiGraph() # are multiedges allowed in Pajek? 
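# --- Editorial sketch (not part of the vendored diff): generate_pajek emits
# a *vertices block with 1-based ids, then *edges (or *arcs for directed
# graphs); the edge 'weight' attribute becomes the third field.
import networkx as nx

G = nx.Graph()
G.add_edge("alpha", "beta", weight=2.0)
lines = list(nx.generate_pajek(G))
assert lines[0] == "*vertices 2"
assert lines[3] == "*edges"
assert lines[4] == "1 2 2.0"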
assume yes + labels = [] # in the order of the file, needed for matrix + while lines: + try: + l = next(lines) + except: # EOF + break + if l.lower().startswith("*network"): + try: + label, name = l.split(None, 1) + except ValueError: + # Line was not of the form: *network NAME + pass + else: + G.graph["name"] = name + elif l.lower().startswith("*vertices"): + nodelabels = {} + l, nnodes = l.split() + for i in range(int(nnodes)): + l = next(lines) + try: + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] + except AttributeError: + splitline = shlex.split(str(l)) + id, label = splitline[0:2] + labels.append(label) + G.add_node(label) + nodelabels[id] = label + G.nodes[label]["id"] = id + try: + x, y, shape = splitline[2:5] + G.nodes[label].update( + {"x": float(x), "y": float(y), "shape": shape} + ) + except: + pass + extra_attr = zip(splitline[5::2], splitline[6::2]) + G.nodes[label].update(extra_attr) + elif l.lower().startswith("*edges") or l.lower().startswith("*arcs"): + if l.lower().startswith("*edge"): + # switch from multidigraph to multigraph + G = nx.MultiGraph(G) + if l.lower().startswith("*arcs"): + # switch to directed with multiple arcs for each existing edge + G = G.to_directed() + for l in lines: + try: + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] + except AttributeError: + splitline = shlex.split(str(l)) + + if len(splitline) < 2: + continue + ui, vi = splitline[0:2] + u = nodelabels.get(ui, ui) + v = nodelabels.get(vi, vi) + # parse the data attached to this edge and put in a dictionary + edge_data = {} + try: + # there should always be a single value on the edge? + w = splitline[2:3] + edge_data.update({"weight": float(w[0])}) + except: + pass + # if there isn't, just assign a 1 + # edge_data.update({'value':1}) + extra_attr = zip(splitline[3::2], splitline[4::2]) + edge_data.update(extra_attr) + # if G.has_edge(u,v): + # multigraph=True + G.add_edge(u, v, **edge_data) + elif l.lower().startswith("*matrix"): + G = nx.DiGraph(G) + adj_list = ( + (labels[row], labels[col], {"weight": int(data)}) + for (row, line) in enumerate(lines) + for (col, data) in enumerate(line.split()) + if int(data) != 0 + ) + G.add_edges_from(adj_list) + + return G + + +def make_qstr(t): + """Returns the string representation of t. + Add outer double-quotes if the string has a space. + """ + if not isinstance(t, str): + t = str(t) + if " " in t: + t = f'"{t}"' + return t diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/sparse6.py b/venv/lib/python3.10/site-packages/networkx/readwrite/sparse6.py new file mode 100644 index 0000000000000000000000000000000000000000..a705994389594e300b77fa5869e06e9403a57d5b --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/sparse6.py @@ -0,0 +1,376 @@ +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at https://www.ics.uci.edu/~eppstein/PADS/ is public domain. +"""Functions for reading and writing graphs in the *sparse6* format. + +The *sparse6* file format is a space-efficient format for large sparse +graphs. For small graphs or large dense graphs, use the *graph6* file +format. + +For more information, see the `sparse6`_ homepage. + +.. 
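# Editor's note: a hedged sketch of parse_pajek on a raw Pajek string,
# showing how *vertices lines become node attributes (id, x, y, shape) and
# how the third field of an *arcs line becomes the edge weight. The printed
# values in the comments are approximate.
import networkx as nx

data = """*vertices 2
1 "a" 0.1 0.2 box
2 "b" 0.3 0.4 box
*arcs
1 2 7
"""
G = nx.parse_pajek(data)
print(type(G).__name__)           # MultiDiGraph
print(G.nodes["a"])               # roughly {'id': '1', 'x': 0.1, 'y': 0.2, 'shape': 'box'}
print(G.get_edge_data("a", "b"))  # roughly {0: {'weight': 7.0}}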
_sparse6: https://users.cecs.anu.edu.au/~bdm/data/formats.html + +""" +import networkx as nx +from networkx.exception import NetworkXError +from networkx.readwrite.graph6 import data_to_n, n_to_data +from networkx.utils import not_implemented_for, open_file + +__all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] + + +def _generate_sparse6_bytes(G, nodes, header): + """Yield bytes in the sparse6 encoding of a graph. + + `G` is an undirected simple graph. `nodes` is the list of nodes for + which the node-induced subgraph will be encoded; if `nodes` is the + list of all nodes in the graph, the entire graph will be + encoded. `header` is a Boolean that specifies whether to generate + the header ``b'>>sparse6<<'`` before the remaining data. + + This function generates `bytes` objects in the following order: + + 1. the header (if requested), + 2. the encoding of the number of nodes, + 3. each character, one-at-a-time, in the encoding of the requested + node-induced subgraph, + 4. a newline character. + + This function raises :exc:`ValueError` if the graph is too large for + the graph6 format (that is, greater than ``2 ** 36`` nodes). + + """ + n = len(G) + if n >= 2**36: + raise ValueError( + "sparse6 is only defined if number of nodes is less than 2 ** 36" + ) + if header: + yield b">>sparse6<<" + yield b":" + for d in n_to_data(n): + yield str.encode(chr(d + 63)) + + k = 1 + while 1 << k < n: + k += 1 + + def enc(x): + """Big endian k-bit encoding of x""" + return [1 if (x & 1 << (k - 1 - i)) else 0 for i in range(k)] + + edges = sorted((max(u, v), min(u, v)) for u, v in G.edges()) + bits = [] + curv = 0 + for v, u in edges: + if v == curv: # current vertex edge + bits.append(0) + bits.extend(enc(u)) + elif v == curv + 1: # next vertex edge + curv += 1 + bits.append(1) + bits.extend(enc(u)) + else: # skip to vertex v and then add edge to u + curv = v + bits.append(1) + bits.extend(enc(v)) + bits.append(0) + bits.extend(enc(u)) + if k < 6 and n == (1 << k) and ((-len(bits)) % 6) >= k and curv < (n - 1): + # Padding special case: small k, n=2^k, + # more than k bits of padding needed, + # current vertex is not (n-1) -- + # appending 1111... would add a loop on (n-1) + bits.append(0) + bits.extend([1] * ((-len(bits)) % 6)) + else: + bits.extend([1] * ((-len(bits)) % 6)) + + data = [ + (bits[i + 0] << 5) + + (bits[i + 1] << 4) + + (bits[i + 2] << 3) + + (bits[i + 3] << 2) + + (bits[i + 4] << 1) + + (bits[i + 5] << 0) + for i in range(0, len(bits), 6) + ] + + for d in data: + yield str.encode(chr(d + 63)) + yield b"\n" + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_sparse6_bytes(string): + """Read an undirected graph in sparse6 format from string. + + Parameters + ---------- + string : string + Data in sparse6 format + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If the string is unable to be parsed in sparse6 format + + Examples + -------- + >>> G = nx.from_sparse6_bytes(b":A_") + >>> sorted(G.edges()) + [(0, 1), (0, 1), (0, 1)] + + See Also + -------- + read_sparse6, write_sparse6 + + References + ---------- + .. 
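# Editor's note: a round-trip sketch for the sparse6 encoder above, using the
# public wrappers defined in this module. from_sparse6_bytes strips the
# ">>sparse6<<" header itself but not the trailing newline (read_sparse6
# strips each line first), hence the .strip() below.
import networkx as nx

G = nx.path_graph(4)
data = nx.to_sparse6_bytes(G)                   # header + encoded bytes + b"\n"
H = nx.from_sparse6_bytes(data.strip())
print(sorted(H.edges()) == sorted(G.edges()))   # True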
[1] Sparse6 specification + + + """ + if string.startswith(b">>sparse6<<"): + string = string[11:] + if not string.startswith(b":"): + raise NetworkXError("Expected leading colon in sparse6") + + chars = [c - 63 for c in string[1:]] + n, data = data_to_n(chars) + k = 1 + while 1 << k < n: + k += 1 + + def parseData(): + """Returns stream of pairs b[i], x[i] for sparse6 format.""" + chunks = iter(data) + d = None # partial data word + dLen = 0 # how many unparsed bits are left in d + + while 1: + if dLen < 1: + try: + d = next(chunks) + except StopIteration: + return + dLen = 6 + dLen -= 1 + b = (d >> dLen) & 1 # grab top remaining bit + + x = d & ((1 << dLen) - 1) # partially built up value of x + xLen = dLen # how many bits included so far in x + while xLen < k: # now grab full chunks until we have enough + try: + d = next(chunks) + except StopIteration: + return + dLen = 6 + x = (x << 6) + d + xLen += 6 + x = x >> (xLen - k) # shift back the extra bits + dLen = xLen - k + yield b, x + + v = 0 + + G = nx.MultiGraph() + G.add_nodes_from(range(n)) + + multigraph = False + for b, x in parseData(): + if b == 1: + v += 1 + # padding with ones can cause overlarge number here + if x >= n or v >= n: + break + elif x > v: + v = x + else: + if G.has_edge(x, v): + multigraph = True + G.add_edge(x, v) + if not multigraph: + G = nx.Graph(G) + return G + + +def to_sparse6_bytes(G, nodes=None, header=True): + """Convert an undirected graph to bytes in sparse6 format. + + Parameters + ---------- + G : Graph (undirected) + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>sparse6<<' bytes to head of data. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed. + + ValueError + If the graph has at least ``2 ** 36`` nodes; the sparse6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + >>> nx.to_sparse6_bytes(nx.path_graph(2)) + b'>>sparse6<<:An\\n' + + See Also + -------- + to_sparse6_bytes, read_sparse6, write_sparse6_bytes + + Notes + ----- + The returned bytes end with a newline character. + + The format does not support edge or node labels. + + References + ---------- + .. [1] Graph6 specification + + + """ + if nodes is not None: + G = G.subgraph(nodes) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + return b"".join(_generate_sparse6_bytes(G, nodes, header)) + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_sparse6(path): + """Read an undirected graph in sparse6 format from path. + + Parameters + ---------- + path : file or string + File or filename to write. + + Returns + ------- + G : Graph/Multigraph or list of Graphs/MultiGraphs + If the file contains multiple lines then a list of graphs is returned + + Raises + ------ + NetworkXError + If the string is unable to be parsed in sparse6 format + + Examples + -------- + You can read a sparse6 file by giving the path to the file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... _ = f.write(b">>sparse6<<:An\\n") + ... _ = f.seek(0) + ... G = nx.read_sparse6(f.name) + >>> list(G.edges()) + [(0, 1)] + + You can also read a sparse6 file by giving an open file-like object:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile() as f: + ... _ = f.write(b">>sparse6<<:An\\n") + ... _ = f.seek(0) + ... 
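# Editor's note: from_sparse6_bytes only returns a MultiGraph when the data
# actually contains parallel edges; otherwise the result is collapsed to a
# plain Graph (see the `multigraph` flag in the decoder above). The expected
# edge list is taken from the doctest in this module.
import networkx as nx

G = nx.from_sparse6_bytes(b":A_")   # the same edge encoded three times
print(type(G).__name__)             # MultiGraph
print(sorted(G.edges()))            # [(0, 1), (0, 1), (0, 1)]

H = nx.from_sparse6_bytes(b":An")   # the path_graph(2) encoding
print(type(H).__name__)             # Graph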
G = nx.read_sparse6(f) + >>> list(G.edges()) + [(0, 1)] + + See Also + -------- + read_sparse6, from_sparse6_bytes + + References + ---------- + .. [1] Sparse6 specification + + + """ + glist = [] + for line in path: + line = line.strip() + if not len(line): + continue + glist.append(from_sparse6_bytes(line)) + if len(glist) == 1: + return glist[0] + else: + return glist + + +@not_implemented_for("directed") +@open_file(1, mode="wb") +def write_sparse6(G, path, nodes=None, header=True): + """Write graph G to given path in sparse6 format. + + Parameters + ---------- + G : Graph (undirected) + + path : file or string + File or filename to write + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by G.nodes() is used. + + header: bool + If True add '>>sparse6<<' string to head of data + + Raises + ------ + NetworkXError + If the graph is directed + + Examples + -------- + You can write a sparse6 file by giving the path to the file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... nx.write_sparse6(nx.path_graph(2), f.name) + ... print(f.read()) + b'>>sparse6<<:An\\n' + + You can also write a sparse6 file by giving an open file-like object:: + + >>> with tempfile.NamedTemporaryFile() as f: + ... nx.write_sparse6(nx.path_graph(2), f) + ... _ = f.seek(0) + ... print(f.read()) + b'>>sparse6<<:An\\n' + + See Also + -------- + read_sparse6, from_sparse6_bytes + + Notes + ----- + The format does not support edge or node labels. + + References + ---------- + .. [1] Sparse6 specification + + + """ + if nodes is not None: + G = G.subgraph(nodes) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + for b in _generate_sparse6_bytes(G, nodes, header): + path.write(b) diff --git a/venv/lib/python3.10/site-packages/networkx/readwrite/text.py b/venv/lib/python3.10/site-packages/networkx/readwrite/text.py new file mode 100644 index 0000000000000000000000000000000000000000..dc182f59866facba02b493dccf74a5ca4126a5a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/networkx/readwrite/text.py @@ -0,0 +1,950 @@ +""" +Text-based visual representations of graphs +""" +import sys +import warnings +from collections import defaultdict + +import networkx as nx +from networkx.utils import open_file + +__all__ = ["forest_str", "generate_network_text", "write_network_text"] + + +class BaseGlyphs: + @classmethod + def as_dict(cls): + return { + a: getattr(cls, a) + for a in dir(cls) + if not a.startswith("_") and a != "as_dict" + } + + +class AsciiBaseGlyphs(BaseGlyphs): + empty: str = "+" + newtree_last: str = "+-- " + newtree_mid: str = "+-- " + endof_forest: str = " " + within_forest: str = ": " + within_tree: str = "| " + + +class AsciiDirectedGlyphs(AsciiBaseGlyphs): + last: str = "L-> " + mid: str = "|-> " + backedge: str = "<-" + vertical_edge: str = "!" 
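# Editor's note: before moving on to the text utilities below, a sketch of
# write_sparse6/read_sparse6 from above with an in-memory buffer instead of
# a named file; using io.BytesIO here is an assumption (the doctests use
# temporary files). The expected bytes come from the to_sparse6_bytes doctest.
import io
import networkx as nx

buf = io.BytesIO()
nx.write_sparse6(nx.path_graph(2), buf)
print(buf.getvalue())         # b'>>sparse6<<:An\n'

buf.seek(0)
G = nx.read_sparse6(buf)
print(list(G.edges()))        # [(0, 1)]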
+ + +class AsciiUndirectedGlyphs(AsciiBaseGlyphs): + last: str = "L-- " + mid: str = "|-- " + backedge: str = "-" + vertical_edge: str = "|" + + +class UtfBaseGlyphs(BaseGlyphs): + # Notes on available box and arrow characters + # https://en.wikipedia.org/wiki/Box-drawing_character + # https://stackoverflow.com/questions/2701192/triangle-arrow + empty: str = "╙" + newtree_last: str = "╙── " + newtree_mid: str = "╟── " + endof_forest: str = " " + within_forest: str = "╎ " + within_tree: str = "│ " + + +class UtfDirectedGlyphs(UtfBaseGlyphs): + last: str = "└─╼ " + mid: str = "├─╼ " + backedge: str = "╾" + vertical_edge: str = "╽" + + +class UtfUndirectedGlyphs(UtfBaseGlyphs): + last: str = "└── " + mid: str = "├── " + backedge: str = "─" + vertical_edge: str = "│" + + +def generate_network_text( + graph, + with_labels=True, + sources=None, + max_depth=None, + ascii_only=False, + vertical_chains=False, +): + """Generate lines in the "network text" format + + This works via a depth-first traversal of the graph and writing a line for + each unique node encountered. Non-tree edges are written to the right of + each node, and connection to a non-tree edge is indicated with an ellipsis. + This representation works best when the input graph is a forest, but any + graph can be represented. + + This notation is original to networkx, although it is simple enough that it + may be known in existing literature. See #5602 for details. The procedure + is summarized as follows: + + 1. Given a set of source nodes (which can be specified, or automatically + discovered via finding the (strongly) connected components and choosing one + node with minimum degree from each), we traverse the graph in depth first + order. + + 2. Each reachable node will be printed exactly once on it's own line. + + 3. Edges are indicated in one of four ways: + + a. a parent "L-style" connection on the upper left. This corresponds to + a traversal in the directed DFS tree. + + b. a backref "<-style" connection shown directly on the right. For + directed graphs, these are drawn for any incoming edges to a node that + is not a parent edge. For undirected graphs, these are drawn for only + the non-parent edges that have already been represented (The edges that + have not been represented will be handled in the recursive case). + + c. a child "L-style" connection on the lower right. Drawing of the + children are handled recursively. + + d. if ``vertical_chains`` is true, and a parent node only has one child + a "vertical-style" edge is drawn between them. + + 4. The children of each node (wrt the directed DFS tree) are drawn + underneath and to the right of it. In the case that a child node has already + been drawn the connection is replaced with an ellipsis ("...") to indicate + that there is one or more connections represented elsewhere. + + 5. If a maximum depth is specified, an edge to nodes past this maximum + depth will be represented by an ellipsis. + + 6. If a a node has a truthy "collapse" value, then we do not traverse past + that node. + + Parameters + ---------- + graph : nx.DiGraph | nx.Graph + Graph to represent + + with_labels : bool | str + If True will use the "label" attribute of a node to display if it + exists otherwise it will use the node value itself. If given as a + string, then that attribute name will be used instead of "label". + Defaults to True. + + sources : List + Specifies which nodes to start traversal from. Note: nodes that are not + reachable from one of these sources may not be shown. 
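# Editor's note: a sketch of the with_labels parameter described above when
# given as an attribute name; nodes are rendered with that attribute instead
# of their value. Output in the comments is approximate.
import networkx as nx

G = nx.DiGraph([(1, 2), (1, 3)])
G.nodes[1]["name"] = "root"
G.nodes[2]["name"] = "left"
G.nodes[3]["name"] = "right"
for line in nx.generate_network_text(G, with_labels="name"):
    print(line)
# ╙── root
#     ├─╼ left
#     └─╼ right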
If unspecified, + the minimal set of nodes needed to reach all others will be used. + + max_depth : int | None + The maximum depth to traverse before stopping. Defaults to None. + + ascii_only : Boolean + If True only ASCII characters are used to construct the visualization + + vertical_chains : Boolean + If True, chains of nodes will be drawn vertically when possible. + + Yields + ------ + str : a line of generated text + + Examples + -------- + >>> graph = nx.path_graph(10) + >>> graph.add_node("A") + >>> graph.add_node("B") + >>> graph.add_node("C") + >>> graph.add_node("D") + >>> graph.add_edge(9, "A") + >>> graph.add_edge(9, "B") + >>> graph.add_edge(9, "C") + >>> graph.add_edge("C", "D") + >>> graph.add_edge("C", "E") + >>> graph.add_edge("C", "F") + >>> nx.write_network_text(graph) + ╙── 0 + └── 1 + └── 2 + └── 3 + └── 4 + └── 5 + └── 6 + └── 7 + └── 8 + └── 9 + ├── A + ├── B + └── C + ├── D + ├── E + └── F + >>> nx.write_network_text(graph, vertical_chains=True) + ╙── 0 + │ + 1 + │ + 2 + │ + 3 + │ + 4 + │ + 5 + │ + 6 + │ + 7 + │ + 8 + │ + 9 + ├── A + ├── B + └── C + ├── D + ├── E + └── F + """ + from typing import Any, NamedTuple + + class StackFrame(NamedTuple): + parent: Any + node: Any + indents: list + this_islast: bool + this_vertical: bool + + collapse_attr = "collapse" + + is_directed = graph.is_directed() + + if is_directed: + glyphs = AsciiDirectedGlyphs if ascii_only else UtfDirectedGlyphs + succ = graph.succ + pred = graph.pred + else: + glyphs = AsciiUndirectedGlyphs if ascii_only else UtfUndirectedGlyphs + succ = graph.adj + pred = graph.adj + + if isinstance(with_labels, str): + label_attr = with_labels + elif with_labels: + label_attr = "label" + else: + label_attr = None + + if max_depth == 0: + yield glyphs.empty + " ..." + elif len(graph.nodes) == 0: + yield glyphs.empty + else: + # If the nodes to traverse are unspecified, find the minimal set of + # nodes that will reach the entire graph + if sources is None: + sources = _find_sources(graph) + + # Populate the stack with each: + # 1. parent node in the DFS tree (or None for root nodes), + # 2. the current node in the DFS tree + # 2. a list of indentations indicating depth + # 3. a flag indicating if the node is the final one to be written. + # Reverse the stack so sources are popped in the correct order. + last_idx = len(sources) - 1 + stack = [ + StackFrame(None, node, [], (idx == last_idx), False) + for idx, node in enumerate(sources) + ][::-1] + + num_skipped_children = defaultdict(lambda: 0) + seen_nodes = set() + while stack: + parent, node, indents, this_islast, this_vertical = stack.pop() + + if node is not Ellipsis: + skip = node in seen_nodes + if skip: + # Mark that we skipped a parent's child + num_skipped_children[parent] += 1 + + if this_islast: + # If we reached the last child of a parent, and we skipped + # any of that parents children, then we should emit an + # ellipsis at the end after this. + if num_skipped_children[parent] and parent is not None: + # Append the ellipsis to be emitted last + next_islast = True + try_frame = StackFrame( + node, Ellipsis, indents, next_islast, False + ) + stack.append(try_frame) + + # Redo this frame, but not as a last object + next_islast = False + try_frame = StackFrame( + parent, node, indents, next_islast, this_vertical + ) + stack.append(try_frame) + continue + + if skip: + continue + seen_nodes.add(node) + + if not indents: + # Top level items (i.e. 
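# Editor's note: a sketch of vertical_chains from above; a single-child chain
# is drawn vertically instead of indenting one level per node. Output in the
# comments is approximate.
import networkx as nx

G = nx.path_graph(4, create_using=nx.DiGraph)
nx.write_network_text(G, vertical_chains=True)
# ╙── 0
#     ╽
#     1
#     ╽
#     2
#     ╽
#     3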
trees in the forest) get different + # glyphs to indicate they are not actually connected + if this_islast: + this_vertical = False + this_prefix = indents + [glyphs.newtree_last] + next_prefix = indents + [glyphs.endof_forest] + else: + this_prefix = indents + [glyphs.newtree_mid] + next_prefix = indents + [glyphs.within_forest] + + else: + # Non-top-level items + if this_vertical: + this_prefix = indents + next_prefix = indents + else: + if this_islast: + this_prefix = indents + [glyphs.last] + next_prefix = indents + [glyphs.endof_forest] + else: + this_prefix = indents + [glyphs.mid] + next_prefix = indents + [glyphs.within_tree] + + if node is Ellipsis: + label = " ..." + suffix = "" + children = [] + else: + if label_attr is not None: + label = str(graph.nodes[node].get(label_attr, node)) + else: + label = str(node) + + # Determine if we want to show the children of this node. + if collapse_attr is not None: + collapse = graph.nodes[node].get(collapse_attr, False) + else: + collapse = False + + # Determine: + # (1) children to traverse into after showing this node. + # (2) parents to immediately show to the right of this node. + if is_directed: + # In the directed case we must show every successor node + # note: it may be skipped later, but we don't have that + # information here. + children = list(succ[node]) + # In the directed case we must show every predecessor + # except for parent we directly traversed from. + handled_parents = {parent} + else: + # Showing only the unseen children results in a more + # concise representation for the undirected case. + children = [ + child for child in succ[node] if child not in seen_nodes + ] + + # In the undirected case, parents are also children, so we + # only need to immediately show the ones we can no longer + # traverse + handled_parents = {*children, parent} + + if max_depth is not None and len(indents) == max_depth - 1: + # Use ellipsis to indicate we have reached maximum depth + if children: + children = [Ellipsis] + handled_parents = {parent} + + if collapse: + # Collapsing a node is the same as reaching maximum depth + if children: + children = [Ellipsis] + handled_parents = {parent} + + # The other parents are other predecessors of this node that + # are not handled elsewhere. + other_parents = [p for p in pred[node] if p not in handled_parents] + if other_parents: + if label_attr is not None: + other_parents_labels = ", ".join( + [ + str(graph.nodes[p].get(label_attr, p)) + for p in other_parents + ] + ) + else: + other_parents_labels = ", ".join( + [str(p) for p in other_parents] + ) + suffix = " ".join(["", glyphs.backedge, other_parents_labels]) + else: + suffix = "" + + # Emit the line for this node, this will be called for each node + # exactly once. + if this_vertical: + yield "".join(this_prefix + [glyphs.vertical_edge]) + + yield "".join(this_prefix + [label, suffix]) + + if vertical_chains: + if is_directed: + num_children = len(set(children)) + else: + num_children = len(set(children) - {parent}) + # The next node can be drawn vertically if it is the only + # remaining child of this node. + next_is_vertical = num_children == 1 + else: + next_is_vertical = False + + # Push children on the stack in reverse order so they are popped in + # the original order. 
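# Editor's note: a sketch of the two pruning mechanisms handled above. Both
# max_depth and a truthy "collapse" node attribute replace a node's children
# with an ellipsis instead of traversing further.
import networkx as nx

G = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
nx.write_network_text(G, max_depth=2)   # grandchildren are rendered as "..."

G.nodes[1]["collapse"] = True
nx.write_network_text(G)                # only the subtree under node 1 is hidden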
+ for idx, child in enumerate(children[::-1]): + next_islast = idx == 0 + try_frame = StackFrame( + node, child, next_prefix, next_islast, next_is_vertical + ) + stack.append(try_frame) + + +@open_file(1, "w") +def write_network_text( + graph, + path=None, + with_labels=True, + sources=None, + max_depth=None, + ascii_only=False, + end="\n", + vertical_chains=False, +): + """Creates a nice text representation of a graph + + This works via a depth-first traversal of the graph and writing a line for + each unique node encountered. Non-tree edges are written to the right of + each node, and connection to a non-tree edge is indicated with an ellipsis. + This representation works best when the input graph is a forest, but any + graph can be represented. + + Parameters + ---------- + graph : nx.DiGraph | nx.Graph + Graph to represent + + path : string or file or callable or None + Filename or file handle for data output. + if a function, then it will be called for each generated line. + if None, this will default to "sys.stdout.write" + + with_labels : bool | str + If True will use the "label" attribute of a node to display if it + exists otherwise it will use the node value itself. If given as a + string, then that attribute name will be used instead of "label". + Defaults to True. + + sources : List + Specifies which nodes to start traversal from. Note: nodes that are not + reachable from one of these sources may not be shown. If unspecified, + the minimal set of nodes needed to reach all others will be used. + + max_depth : int | None + The maximum depth to traverse before stopping. Defaults to None. + + ascii_only : Boolean + If True only ASCII characters are used to construct the visualization + + end : string + The line ending character + + vertical_chains : Boolean + If True, chains of nodes will be drawn vertically when possible. + + Examples + -------- + >>> graph = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + >>> nx.write_network_text(graph) + ╙── 0 + ├─╼ 1 + │ ├─╼ 3 + │ └─╼ 4 + └─╼ 2 + ├─╼ 5 + └─╼ 6 + + >>> # A near tree with one non-tree edge + >>> graph.add_edge(5, 1) + >>> nx.write_network_text(graph) + ╙── 0 + ├─╼ 1 ╾ 5 + │ ├─╼ 3 + │ └─╼ 4 + └─╼ 2 + ├─╼ 5 + │ └─╼ ... + └─╼ 6 + + >>> graph = nx.cycle_graph(5) + >>> nx.write_network_text(graph) + ╙── 0 + ├── 1 + │ └── 2 + │ └── 3 + │ └── 4 ─ 0 + └── ... + + >>> graph = nx.cycle_graph(5, nx.DiGraph) + >>> nx.write_network_text(graph, vertical_chains=True) + ╙── 0 ╾ 4 + ╽ + 1 + ╽ + 2 + ╽ + 3 + ╽ + 4 + └─╼ ... + + >>> nx.write_network_text(graph, vertical_chains=True, ascii_only=True) + +-- 0 <- 4 + ! + 1 + ! + 2 + ! + 3 + ! + 4 + L-> ... + + >>> graph = nx.generators.barbell_graph(4, 2) + >>> nx.write_network_text(graph, vertical_chains=False) + ╙── 4 + ├── 5 + │ └── 6 + │ ├── 7 + │ │ ├── 8 ─ 6 + │ │ │ └── 9 ─ 6, 7 + │ │ └── ... + │ └── ... + └── 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ └── 2 ─ 0, 3 + │ └── ... + └── ... + >>> nx.write_network_text(graph, vertical_chains=True) + ╙── 4 + ├── 5 + │ │ + │ 6 + │ ├── 7 + │ │ ├── 8 ─ 6 + │ │ │ │ + │ │ │ 9 ─ 6, 7 + │ │ └── ... + │ └── ... + └── 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ │ + │ │ 2 ─ 0, 3 + │ └── ... + └── ... + + >>> graph = nx.complete_graph(5, create_using=nx.Graph) + >>> nx.write_network_text(graph) + ╙── 0 + ├── 1 + │ ├── 2 ─ 0 + │ │ ├── 3 ─ 0, 1 + │ │ │ └── 4 ─ 0, 1, 2 + │ │ └── ... + │ └── ... + └── ... + + >>> graph = nx.complete_graph(3, create_using=nx.DiGraph) + >>> nx.write_network_text(graph) + ╙── 0 ╾ 1, 2 + ├─╼ 1 ╾ 2 + │ ├─╼ 2 ╾ 0 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... 
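# Editor's note: as documented above, `path` may also be a callable that is
# invoked once per rendered line, which makes it easy to collect the output
# instead of printing it. The expected list is approximate.
import networkx as nx

lines = []
nx.write_network_text(nx.path_graph(3, create_using=nx.DiGraph), path=lines.append, end="")
print(lines)   # roughly ['╙── 0', '    └─╼ 1', '        └─╼ 2']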
+ """ + if path is None: + # The path is unspecified, write to stdout + _write = sys.stdout.write + elif hasattr(path, "write"): + # The path is already an open file + _write = path.write + elif callable(path): + # The path is a custom callable + _write = path + else: + raise TypeError(type(path)) + + for line in generate_network_text( + graph, + with_labels=with_labels, + sources=sources, + max_depth=max_depth, + ascii_only=ascii_only, + vertical_chains=vertical_chains, + ): + _write(line + end) + + +def _find_sources(graph): + """ + Determine a minimal set of nodes such that the entire graph is reachable + """ + # For each connected part of the graph, choose at least + # one node as a starting point, preferably without a parent + if graph.is_directed(): + # Choose one node from each SCC with minimum in_degree + sccs = list(nx.strongly_connected_components(graph)) + # condensing the SCCs forms a dag, the nodes in this graph with + # 0 in-degree correspond to the SCCs from which the minimum set + # of nodes from which all other nodes can be reached. + scc_graph = nx.condensation(graph, sccs) + supernode_to_nodes = {sn: [] for sn in scc_graph.nodes()} + # Note: the order of mapping differs between pypy and cpython + # so we have to loop over graph nodes for consistency + mapping = scc_graph.graph["mapping"] + for n in graph.nodes: + sn = mapping[n] + supernode_to_nodes[sn].append(n) + sources = [] + for sn in scc_graph.nodes(): + if scc_graph.in_degree[sn] == 0: + scc = supernode_to_nodes[sn] + node = min(scc, key=lambda n: graph.in_degree[n]) + sources.append(node) + else: + # For undirected graph, the entire graph will be reachable as + # long as we consider one node from every connected component + sources = [ + min(cc, key=lambda n: graph.degree[n]) + for cc in nx.connected_components(graph) + ] + sources = sorted(sources, key=lambda n: graph.degree[n]) + return sources + + +def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=False): + """Creates a nice utf8 representation of a forest + + This function has been superseded by + :func:`nx.readwrite.text.generate_network_text`, which should be used + instead. + + Parameters + ---------- + graph : nx.DiGraph | nx.Graph + Graph to represent (must be a tree, forest, or the empty graph) + + with_labels : bool + If True will use the "label" attribute of a node to display if it + exists otherwise it will use the node value itself. Defaults to True. + + sources : List + Mainly relevant for undirected forests, specifies which nodes to list + first. If unspecified the root nodes of each tree will be used for + directed forests; for undirected forests this defaults to the nodes + with the smallest degree. + + write : callable + Function to use to write to, if None new lines are appended to + a list and returned. If set to the `print` function, lines will + be written to stdout as they are generated. If specified, + this function will return None. Defaults to None. 
+ + ascii_only : Boolean + If True only ASCII characters are used to construct the visualization + + Returns + ------- + str | None : + utf8 representation of the tree / forest + + Examples + -------- + >>> graph = nx.balanced_tree(r=2, h=3, create_using=nx.DiGraph) + >>> print(nx.forest_str(graph)) + ╙── 0 + ├─╼ 1 + │ ├─╼ 3 + │ │ ├─╼ 7 + │ │ └─╼ 8 + │ └─╼ 4 + │ ├─╼ 9 + │ └─╼ 10 + └─╼ 2 + ├─╼ 5 + │ ├─╼ 11 + │ └─╼ 12 + └─╼ 6 + ├─╼ 13 + └─╼ 14 + + + >>> graph = nx.balanced_tree(r=1, h=2, create_using=nx.Graph) + >>> print(nx.forest_str(graph)) + ╙── 0 + └── 1 + └── 2 + + >>> print(nx.forest_str(graph, ascii_only=True)) + +-- 0 + L-- 1 + L-- 2 + """ + msg = ( + "\nforest_str is deprecated as of version 3.1 and will be removed " + "in version 3.3. Use generate_network_text or write_network_text " + "instead.\n" + ) + warnings.warn(msg, DeprecationWarning) + + if len(graph.nodes) > 0: + if not nx.is_forest(graph): + raise nx.NetworkXNotImplemented("input must be a forest or the empty graph") + + printbuf = [] + if write is None: + _write = printbuf.append + else: + _write = write + + write_network_text( + graph, + _write, + with_labels=with_labels, + sources=sources, + ascii_only=ascii_only, + end="", + ) + + if write is None: + # Only return a string if the custom write function was not specified + return "\n".join(printbuf) + + +def _parse_network_text(lines): + """Reconstructs a graph from a network text representation. + + This is mainly used for testing. Network text is for display, not + serialization, as such this cannot parse all network text representations + because node labels can be ambiguous with the glyphs and indentation used + to represent edge structure. Additionally, there is no way to determine if + disconnected graphs were originally directed or undirected. + + Parameters + ---------- + lines : list or iterator of strings + Input data in network text format + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in network text format. + """ + from itertools import chain + from typing import Any, NamedTuple, Union + + class ParseStackFrame(NamedTuple): + node: Any + indent: int + has_vertical_child: int | None + + initial_line_iter = iter(lines) + + is_ascii = None + is_directed = None + + ############## + # Initial Pass + ############## + + # Do an initial pass over the lines to determine what type of graph it is. + # Remember what these lines were, so we can reiterate over them in the + # parsing pass. + initial_lines = [] + try: + first_line = next(initial_line_iter) + except StopIteration: + ... + else: + initial_lines.append(first_line) + # The first character indicates if it is an ASCII or UTF graph + first_char = first_line[0] + if first_char in { + UtfBaseGlyphs.empty, + UtfBaseGlyphs.newtree_mid[0], + UtfBaseGlyphs.newtree_last[0], + }: + is_ascii = False + elif first_char in { + AsciiBaseGlyphs.empty, + AsciiBaseGlyphs.newtree_mid[0], + AsciiBaseGlyphs.newtree_last[0], + }: + is_ascii = True + else: + raise AssertionError(f"Unexpected first character: {first_char}") + + if is_ascii: + directed_glyphs = AsciiDirectedGlyphs.as_dict() + undirected_glyphs = AsciiUndirectedGlyphs.as_dict() + else: + directed_glyphs = UtfDirectedGlyphs.as_dict() + undirected_glyphs = UtfUndirectedGlyphs.as_dict() + + # For both directed / undirected glyphs, determine which glyphs never + # appear as substrings in the other undirected / directed glyphs. Glyphs + # with this property unambiguously indicates if a graph is directed / + # undirected. 
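# Editor's note: the vendored forest_str above is deprecated in favour of
# write_network_text and, unlike it, only accepts forests; anything with a
# cycle raises NetworkXNotImplemented with the message shown in the code.
import networkx as nx

try:
    nx.forest_str(nx.cycle_graph(3))
except nx.NetworkXNotImplemented as err:
    print(err)   # input must be a forest or the empty graph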
+ directed_items = set(directed_glyphs.values()) + undirected_items = set(undirected_glyphs.values()) + unambiguous_directed_items = [] + for item in directed_items: + other_items = undirected_items + other_supersets = [other for other in other_items if item in other] + if not other_supersets: + unambiguous_directed_items.append(item) + unambiguous_undirected_items = [] + for item in undirected_items: + other_items = directed_items + other_supersets = [other for other in other_items if item in other] + if not other_supersets: + unambiguous_undirected_items.append(item) + + for line in initial_line_iter: + initial_lines.append(line) + if any(item in line for item in unambiguous_undirected_items): + is_directed = False + break + elif any(item in line for item in unambiguous_directed_items): + is_directed = True + break + + if is_directed is None: + # Not enough information to determine, choose undirected by default + is_directed = False + + glyphs = directed_glyphs if is_directed else undirected_glyphs + + # the backedge symbol by itself can be ambiguous, but with spaces around it + # becomes unambiguous. + backedge_symbol = " " + glyphs["backedge"] + " " + + # Reconstruct an iterator over all of the lines. + parsing_line_iter = chain(initial_lines, initial_line_iter) + + ############## + # Parsing Pass + ############## + + edges = [] + nodes = [] + is_empty = None + + noparent = object() # sentinel value + + # keep a stack of previous nodes that could be parents of subsequent nodes + stack = [ParseStackFrame(noparent, -1, None)] + + for line in parsing_line_iter: + if line == glyphs["empty"]: + # If the line is the empty glyph, we are done. + # There shouldn't be anything else after this. + is_empty = True + continue + + if backedge_symbol in line: + # This line has one or more backedges, separate those out + node_part, backedge_part = line.split(backedge_symbol) + backedge_nodes = [u.strip() for u in backedge_part.split(", ")] + # Now the node can be parsed + node_part = node_part.rstrip() + prefix, node = node_part.rsplit(" ", 1) + node = node.strip() + # Add the backedges to the edge list + edges.extend([(u, node) for u in backedge_nodes]) + else: + # No backedge, the tail of this line is the node + prefix, node = line.rsplit(" ", 1) + node = node.strip() + + prev = stack.pop() + + if node in glyphs["vertical_edge"]: + # Previous node is still the previous node, but we know it will + # have exactly one child, which will need to have its nesting level + # adjusted. + modified_prev = ParseStackFrame( + prev.node, + prev.indent, + True, + ) + stack.append(modified_prev) + continue + + # The length of the string before the node characters give us a hint + # about our nesting level. The only case where this doesn't work is + # when there are vertical chains, which is handled explicitly. + indent = len(prefix) + curr = ParseStackFrame(node, indent, None) + + if prev.has_vertical_child: + # In this case we know prev must be the parent of our current line, + # so we don't have to search the stack. (which is good because the + # indentation check wouldn't work in this case). + ... + else: + # If the previous node nesting-level is greater than the current + # nodes nesting-level than the previous node was the end of a path, + # and is not our parent. We can safely pop nodes off the stack + # until we find one with a comparable nesting-level, which is our + # parent. 
+ while curr.indent <= prev.indent: + prev = stack.pop() + + if node == "...": + # The current previous node is no longer a valid parent, + # keep it popped from the stack. + stack.append(prev) + else: + # The previous and current nodes may still be parents, so add them + # back onto the stack. + stack.append(prev) + stack.append(curr) + + # Add the node and the edge to its parent to the node / edge lists. + nodes.append(curr.node) + if prev.node is not noparent: + edges.append((prev.node, curr.node)) + + if is_empty: + # Sanity check + assert len(nodes) == 0 + + # Reconstruct the graph + cls = nx.DiGraph if is_directed else nx.Graph + new = cls() + new.add_nodes_from(nodes) + new.add_edges_from(edges) + return new
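# Editor's note: a round-trip sketch for the test helper above. It is a
# private function, so importing it from the module path is an assumption;
# note that parsed node labels come back as strings rather than the original
# integers.
import networkx as nx
from networkx.readwrite.text import _parse_network_text

G = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
lines = list(nx.generate_network_text(G))
H = _parse_network_text(lines)
print(type(H).__name__)   # DiGraph
print(sorted(H.edges()) == sorted((str(u), str(v)) for u, v in G.edges()))  # True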